diff --git a/.github/workflows/ci_e2e.yml b/.github/workflows/ci_e2e.yml index cab1eeda61..1aa5ddf780 100644 --- a/.github/workflows/ci_e2e.yml +++ b/.github/workflows/ci_e2e.yml @@ -45,7 +45,7 @@ jobs: sh ./docker/build/hooks/build - name: Docker Run run: | - export VERSION=$(cat $(pwd)/pom.xml | grep '' -m 1 | awk '{print $1}' | sed 's///' | sed 's/<\/version>//') + export VERSION=$(cat $(pwd)/pom.xml | grep '' -m 1 | awk '{print $1}' | sed 's///' | sed 's/<\/revision>//') sed -i "s/apache\/dolphinscheduler:latest/apache\/dolphinscheduler:${VERSION}/g" $(pwd)/docker/docker-swarm/docker-compose.yml docker-compose -f $(pwd)/docker/docker-swarm/docker-compose.yml up -d - name: Check Server Status diff --git a/docker/build/hooks/build b/docker/build/hooks/build index ce7362bf55..40bf74d2b6 100755 --- a/docker/build/hooks/build +++ b/docker/build/hooks/build @@ -24,7 +24,7 @@ printenv if [ -z "${VERSION}" ] then echo "set default environment variable [VERSION]" - VERSION=$(grep '' -m 1 "$(pwd)"/pom.xml | awk '{print $1}' | sed 's///' | sed 's/<\/version>//') + VERSION=$(grep '' -m 1 "$(pwd)"/pom.xml | awk '{print $1}' | sed 's///' | sed 's/<\/revision>//') export VERSION fi diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml index 930b92f855..480b7f9665 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java index 
0a6851a4ac..b0f07b6f88 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java @@ -17,6 +17,11 @@ package org.apache.dolphinscheduler.plugin.alert.dingtalk; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_FALSE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES; + import org.apache.dolphinscheduler.spi.alert.AlertChannel; import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; import org.apache.dolphinscheduler.spi.params.InputParam; @@ -52,9 +57,9 @@ public class DingTalkAlertChannelFactory implements AlertChannelFactory { .build(); RadioParam isEnableProxy = RadioParam.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE) - .addParamsOptions(new ParamsOptions("YES", true, false)) - .addParamsOptions(new ParamsOptions("NO", false, false)) - .setValue(true) + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_TRUE) .addValidate(Validate.newBuilder() .setRequired(false) .build()) diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml index f4524b2046..a6c333b5a3 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff 
--git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java index f8aa17fca3..ba71d790aa 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java @@ -17,6 +17,11 @@ package org.apache.dolphinscheduler.plugin.alert.email; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_FALSE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES; + import org.apache.dolphinscheduler.spi.alert.AlertChannel; import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; import org.apache.dolphinscheduler.spi.alert.AlertConstants; @@ -72,9 +77,9 @@ public class EmailAlertChannelFactory implements AlertChannelFactory { .build(); RadioParam enableSmtpAuth = RadioParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_AUTH, MailParamsConstants.MAIL_SMTP_AUTH) - .addParamsOptions(new ParamsOptions("YES", true, false)) - .addParamsOptions(new ParamsOptions("NO", false, false)) - .setValue(true) + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_TRUE) .addValidate(Validate.newBuilder().setRequired(true).build()) .build(); @@ -87,16 +92,16 @@ public class EmailAlertChannelFactory implements AlertChannelFactory { .build(); RadioParam enableTls = 
RadioParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_STARTTLS_ENABLE, MailParamsConstants.MAIL_SMTP_STARTTLS_ENABLE) - .addParamsOptions(new ParamsOptions("YES", true, false)) - .addParamsOptions(new ParamsOptions("NO", false, false)) - .setValue(false) + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_FALSE) .addValidate(Validate.newBuilder().setRequired(true).build()) .build(); RadioParam enableSsl = RadioParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_SSL_ENABLE, MailParamsConstants.MAIL_SMTP_SSL_ENABLE) - .addParamsOptions(new ParamsOptions("YES", true, false)) - .addParamsOptions(new ParamsOptions("NO", false, false)) - .setValue(false) + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_FALSE) .addValidate(Validate.newBuilder().setRequired(true).build()) .build(); diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/pom.xml index 44d4cdbb07..927c5b850a 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java index 0e863f95d4..d85b4233a5 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java +++ 
b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java @@ -17,6 +17,11 @@ package org.apache.dolphinscheduler.plugin.alert.feishu; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_FALSE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES; + import org.apache.dolphinscheduler.spi.alert.AlertChannel; import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; import org.apache.dolphinscheduler.spi.params.InputParam; @@ -44,9 +49,9 @@ public class FeiShuAlertChannelFactory implements AlertChannelFactory { .build(); RadioParam isEnableProxy = RadioParam.newBuilder(FeiShuParamsConstants.NAME_FEI_SHU_PROXY_ENABLE, FeiShuParamsConstants.NAME_FEI_SHU_PROXY_ENABLE) - .addParamsOptions(new ParamsOptions("YES", true, false)) - .addParamsOptions(new ParamsOptions("NO", false, false)) - .setValue(true) + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_TRUE) .addValidate(Validate.newBuilder() .setRequired(false) .build()) diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml index 5223730e3e..eba3a171c5 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml index 8c35b3cbf6..4d3a2ecd8e 100644 --- 
a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java index 70ab76381a..a81cb4f12e 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java @@ -56,8 +56,8 @@ public class ScriptAlertChannelFactory implements AlertChannelFactory { .build(); RadioParam scriptTypeParams = RadioParam.newBuilder(ScriptParamsConstants.NAME_SCRIPT_TYPE, ScriptParamsConstants.SCRIPT_TYPE) - .addParamsOptions(new ParamsOptions(ScriptType.SHELL.getDescp(), ScriptType.SHELL.getCode(), false)) - .setValue(ScriptType.SHELL.getCode()) + .addParamsOptions(new ParamsOptions(ScriptType.SHELL.getDescp(), ScriptType.SHELL.getDescp(), false)) + .setValue(ScriptType.SHELL.getDescp()) .addValidate(Validate.newBuilder().setRequired(true).build()) .build(); diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java index 377c318b8e..638a500299 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java +++ 
b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java @@ -33,19 +33,19 @@ public class ScriptSender { private String scriptPath; - private Integer scriptType; + private String scriptType; private String userParams; ScriptSender(Map config) { scriptPath = config.get(ScriptParamsConstants.NAME_SCRIPT_PATH); - scriptType = Integer.parseInt(config.get(ScriptParamsConstants.NAME_SCRIPT_TYPE)); + scriptType = config.get(ScriptParamsConstants.NAME_SCRIPT_TYPE); userParams = config.get(ScriptParamsConstants.NAME_SCRIPT_USER_PARAMS); } AlertResult sendScriptAlert(String msg) { AlertResult alertResult = new AlertResult(); - if (ScriptType.of(scriptType).equals(ScriptType.SHELL)) { + if (ScriptType.SHELL.getDescp().equals(scriptType)) { return executeShellScript(msg); } return alertResult; diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java index 59b17d0a58..ff3b8096bb 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java @@ -45,18 +45,4 @@ public enum ScriptType { return descp; } - private static final Map SCRIPT_TYPE_MAP = new HashMap<>(); - - static { - for (ScriptType scriptType : ScriptType.values()) { - SCRIPT_TYPE_MAP.put(scriptType.code, scriptType); - } - } - - public static ScriptType of(Integer code) { - if (SCRIPT_TYPE_MAP.containsKey(code)) { - return SCRIPT_TYPE_MAP.get(code); - } - throw new IllegalArgumentException("invalid code : " + code); - } } diff --git 
a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java index 1cd74cfaba..7f15ed8a7b 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java @@ -40,7 +40,7 @@ public class ScriptSenderTest { @Before public void initScriptConfig() { - scriptConfig.put(ScriptParamsConstants.NAME_SCRIPT_TYPE, String.valueOf(ScriptType.SHELL.getCode())); + scriptConfig.put(ScriptParamsConstants.NAME_SCRIPT_TYPE, String.valueOf(ScriptType.SHELL.getDescp())); scriptConfig.put(ScriptParamsConstants.NAME_SCRIPT_USER_PARAMS, "userParams"); scriptConfig.put(ScriptParamsConstants.NAME_SCRIPT_PATH, shellFilPath); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml index 123cebc5c5..469b941188 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff --git a/dolphinscheduler-alert-plugin/pom.xml b/dolphinscheduler-alert-plugin/pom.xml index d199dc4e2c..a10f7de254 100644 --- a/dolphinscheduler-alert-plugin/pom.xml +++ b/dolphinscheduler-alert-plugin/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff --git a/dolphinscheduler-alert/pom.xml b/dolphinscheduler-alert/pom.xml index cea763ccbd..94d8f0a319 100644 --- a/dolphinscheduler-alert/pom.xml +++ 
b/dolphinscheduler-alert/pom.xml @@ -21,7 +21,7 @@ org.apache.dolphinscheduler dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} dolphinscheduler-alert ${project.artifactId} diff --git a/dolphinscheduler-api/pom.xml b/dolphinscheduler-api/pom.xml index 6ee75837b7..b09de8628a 100644 --- a/dolphinscheduler-api/pom.xml +++ b/dolphinscheduler-api/pom.xml @@ -22,7 +22,7 @@ org.apache.dolphinscheduler dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} dolphinscheduler-api ${project.artifactId} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java index 17faad04bc..e2360db0b1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java @@ -14,8 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.controller; +package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.CREATE_ACCESS_TOKEN_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DELETE_ACCESS_TOKEN_ERROR; @@ -54,7 +54,7 @@ import springfox.documentation.annotations.ApiIgnore; /** * access token controller */ -@Api(tags = "ACCESS_TOKEN_TAG", position = 1) +@Api(tags = "ACCESS_TOKEN_TAG") @RestController @RequestMapping("/access-token") public class AccessTokenController extends BaseController { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java index 10590529bf..dd76753e14 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java @@ -26,13 +26,12 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ALERT_GROUP_ER import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.AlertGroupService; +import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; -import java.util.HashMap; import java.util.Map; import org.slf4j.Logger; @@ -56,7 +55,7 @@ import springfox.documentation.annotations.ApiIgnore; /** * alert group controller */ -@Api(tags = "ALERT_GROUP_TAG", position = 1) +@Api(tags = "ALERT_GROUP_TAG") @RestController @RequestMapping("alert-group") public class 
AlertGroupController extends BaseController { @@ -88,12 +87,9 @@ public class AlertGroupController extends BaseController { @RequestParam(value = "groupName") String groupName, @RequestParam(value = "description", required = false) String description, @RequestParam(value = "alertInstanceIds") String alertInstanceIds) { - String strUserName = StringUtils.replaceNRTtoUnderline(loginUser.getUserName()); - String strGroupName = StringUtils.replaceNRTtoUnderline(groupName); - String strDescription = StringUtils.replaceNRTtoUnderline(description); - String strAlertInstanceIds = StringUtils.replaceNRTtoUnderline(alertInstanceIds); logger.info("loginUser user {}, create alert group, groupName: {}, desc: {},alertInstanceIds:{}", - strUserName, strGroupName, strDescription, strAlertInstanceIds); + RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(groupName), + RegexUtils.escapeNRT(description), RegexUtils.escapeNRT(alertInstanceIds)); Map result = alertGroupService.createAlertgroup(loginUser, groupName, description, alertInstanceIds); return returnDataList(result); } @@ -111,7 +107,7 @@ public class AlertGroupController extends BaseController { public Result list(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user {}, query all alertGroup", loginUser.getUserName()); - HashMap result = alertGroupService.queryAlertgroup(); + Map result = alertGroupService.queryAlertgroup(); return returnDataList(result); } @@ -174,9 +170,9 @@ public class AlertGroupController extends BaseController { @RequestParam(value = "description", required = false) String description, @RequestParam(value = "alertInstanceIds") String alertInstanceIds) { logger.info("login user {}, updateProcessInstance alert group, groupName: {}, desc: {}", - StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), - StringUtils.replaceNRTtoUnderline(groupName), - StringUtils.replaceNRTtoUnderline(description)); + 
RegexUtils.escapeNRT(loginUser.getUserName()), + RegexUtils.escapeNRT(groupName), + RegexUtils.escapeNRT(description)); Map result = alertGroupService.updateAlertgroup(loginUser, id, groupName, description, alertInstanceIds); return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java index d34f42060f..21ff2506af 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java @@ -27,9 +27,9 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ALERT_PLUGIN_I import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.AlertPluginInstanceService; +import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; @@ -55,7 +55,7 @@ import springfox.documentation.annotations.ApiIgnore; /** * alert plugin instance controller */ -@Api(tags = "ALERT_PLUGIN_INSTANCE_TAG", position = 1) +@Api(tags = "ALERT_PLUGIN_INSTANCE_TAG") @RestController @RequestMapping("alert-plugin-instance") public class AlertPluginInstanceController extends BaseController { @@ -89,8 +89,8 @@ public class AlertPluginInstanceController extends BaseController { @RequestParam(value = "instanceName") String instanceName, @RequestParam(value = "pluginInstanceParams") String pluginInstanceParams) { logger.info("login user {},create alert plugin instance, instanceName:{} ", 
- StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), - StringUtils.replaceNRTtoUnderline(instanceName)); + RegexUtils.escapeNRT(loginUser.getUserName()), + RegexUtils.escapeNRT(instanceName)); Map result = alertPluginInstanceService.create(loginUser, pluginDefineId, instanceName, pluginInstanceParams); return returnDataList(result); } @@ -117,7 +117,7 @@ public class AlertPluginInstanceController extends BaseController { @RequestParam(value = "alertPluginInstanceId") int alertPluginInstanceId, @RequestParam(value = "instanceName") String instanceName, @RequestParam(value = "pluginInstanceParams") String pluginInstanceParams) { - logger.info("login user {},update alert plugin instance id {}", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), alertPluginInstanceId); + logger.info("login user {},update alert plugin instance id {}", RegexUtils.escapeNRT(loginUser.getUserName()), alertPluginInstanceId); Map result = alertPluginInstanceService.update(loginUser, alertPluginInstanceId, instanceName, pluginInstanceParams); return returnDataList(result); } @@ -138,7 +138,7 @@ public class AlertPluginInstanceController extends BaseController { @ApiException(DELETE_ALERT_PLUGIN_INSTANCE_ERROR) public Result deleteAlertPluginInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int id) { - logger.info("login user {},delete alert plugin instance id {}", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), id); + logger.info("login user {},delete alert plugin instance id {}", RegexUtils.escapeNRT(loginUser.getUserName()), id); Map result = alertPluginInstanceService.delete(loginUser, id); return returnDataList(result); @@ -157,7 +157,7 @@ public class AlertPluginInstanceController extends BaseController { @ApiException(GET_ALERT_PLUGIN_INSTANCE_ERROR) public Result getAlertPluginInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int 
id) { - logger.info("login user {},get alert plugin instance, id {}", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), id); + logger.info("login user {},get alert plugin instance, id {}", RegexUtils.escapeNRT(loginUser.getUserName()), id); Map result = alertPluginInstanceService.get(loginUser, id); return returnDataList(result); } @@ -173,7 +173,7 @@ public class AlertPluginInstanceController extends BaseController { @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_ALL_ALERT_PLUGIN_INSTANCE_ERROR) public Result getAlertPluginInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - logger.info("login user {}, query all alert plugin instance", StringUtils.replaceNRTtoUnderline(loginUser.getUserName())); + logger.info("login user {}, query all alert plugin instance", RegexUtils.escapeNRT(loginUser.getUserName())); Map result = alertPluginInstanceService.queryAll(); return returnDataList(result); } @@ -193,7 +193,7 @@ public class AlertPluginInstanceController extends BaseController { @ResponseStatus(HttpStatus.OK) public Result verifyGroupName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "alertInstanceName") String alertInstanceName) { - logger.info("login user {},verify alert instance name: {}", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), StringUtils.replaceNRTtoUnderline(alertInstanceName)); + logger.info("login user {},verify alert instance name: {}", RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(alertInstanceName)); boolean exist = alertPluginInstanceService.checkExistPluginInstanceName(alertInstanceName); Result result = new Result(); @@ -227,7 +227,7 @@ public class AlertPluginInstanceController extends BaseController { public Result listPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { - logger.info("login 
user {}, list paging, pageNo: {}, pageSize: {}",StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), pageNo, pageSize); + logger.info("login user {}, list paging, pageNo: {}, pageSize: {}", RegexUtils.escapeNRT(loginUser.getUserName()), pageNo, pageSize); Map result = checkPageParams(pageNo, pageSize); if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/BaseController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/BaseController.java index c434398679..c9202d4ac6 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/BaseController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/BaseController.java @@ -14,8 +14,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.common.Constants.COMMA; +import static org.apache.dolphinscheduler.common.Constants.HTTP_HEADER_UNKNOWN; +import static org.apache.dolphinscheduler.common.Constants.HTTP_X_FORWARDED_FOR; +import static org.apache.dolphinscheduler.common.Constants.HTTP_X_REAL_IP; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; @@ -23,12 +29,11 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.Resource; -import javax.servlet.http.HttpServletRequest; import java.text.MessageFormat; import java.util.HashMap; import java.util.Map; -import static org.apache.dolphinscheduler.common.Constants.*; +import javax.servlet.http.HttpServletRequest; /** * base controller diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java index f53391f203..eac0b63097 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java @@ -14,33 +14,44 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.COMMAND_STATE_COUNT_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.COUNT_PROCESS_DEFINITION_USER_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.COUNT_PROCESS_INSTANCE_STATE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUEUE_COUNT_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.TASK_INSTANCE_STATE_COUNT_ERROR; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.DataAnalysisService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; +import org.springframework.web.bind.annotation.GetMapping; +import 
org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; -import static org.apache.dolphinscheduler.api.enums.Status.*; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** * data analysis controller */ -@Api(tags = "DATA_ANALYSIS_TAG", position = 1) +@Api(tags = "DATA_ANALYSIS_TAG") @RestController @RequestMapping("projects/analysis") public class DataAnalysisController extends BaseController { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java index 9fbe8f4e0a..05f78f8db2 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java @@ -62,7 +62,7 @@ import springfox.documentation.annotations.ApiIgnore; /** * data source controller */ -@Api(tags = "DATA_SOURCE_TAG", position = 3) +@Api(tags = "DATA_SOURCE_TAG") @RestController @RequestMapping("datasources") public class DataSourceController extends BaseController { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java index c1e75a73f5..a84554af17 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java @@ -35,7 +35,6 @@ import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.User; -import java.text.ParseException; import java.util.Map; import org.slf4j.Logger; @@ -58,9 +57,9 @@ import io.swagger.annotations.ApiParam; import springfox.documentation.annotations.ApiIgnore; /** - * execute process controller + * executor controller */ -@Api(tags = "PROCESS_INSTANCE_EXECUTOR_TAG", position = 1) +@Api(tags = "EXECUTOR_TAG") @RestController @RequestMapping("projects/{projectName}/executors") public class ExecutorController extends BaseController { @@ -121,7 +120,7 @@ public class ExecutorController extends BaseController { @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority, @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "timeout", required = false) Integer timeout, - @RequestParam(value = "startParams", required = false) String startParams) throws ParseException { + @RequestParam(value = "startParams", required = false) String startParams) { logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, " + "failure policy: {}, node name: {}, node dep: {}, notify type: {}, " + "notify group id: {}, run mode: {},process instance priority:{}, workerGroup: {}, timeout: {}, startParams: {} ", diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java index 7d612b8b1d..1c80f8f00a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java @@ -14,8 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.api.controller; +package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_INSTANCE_LOG_ERROR; @@ -46,11 +46,10 @@ import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; import springfox.documentation.annotations.ApiIgnore; - /** - * log controller + * logger controller */ -@Api(tags = "LOGGER_TAG", position = 13) +@Api(tags = "LOGGER_TAG") @RestController @RequestMapping("/log") public class LoggerController extends BaseController { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java index ce21425605..0254a62a9f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java @@ -14,8 +14,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.IP_IS_EMPTY; +import static org.apache.dolphinscheduler.api.enums.Status.SIGN_OUT_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.USER_LOGIN_FAILURE; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; @@ -25,28 +29,34 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.*; + import org.apache.commons.httpclient.HttpStatus; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import java.util.Map; - -import static org.apache.dolphinscheduler.api.enums.Status.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** - * user login controller - *

- * swagger bootstrap ui docs refer : https://doc.xiaominfo.com/guide/enh-func.html + * login controller */ -@Api(tags = "LOGIN_TAG", position = 1) +@Api(tags = "LOGIN_TAG") @RestController @RequestMapping("") public class LoginController extends BaseController { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java index 308a6d33d5..329575b29a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java @@ -14,31 +14,40 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.LIST_MASTERS_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.LIST_WORKERS_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATABASE_STATE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_ZOOKEEPER_STATE_ERROR; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.MonitorService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; +import org.springframework.web.bind.annotation.GetMapping; 
+import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; -import static org.apache.dolphinscheduler.api.enums.Status.*; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** * monitor controller */ -@Api(tags = "MONITOR_TAG", position = 1) +@Api(tags = "MONITOR_TAG") @RestController @RequestMapping("/monitor") public class MonitorController extends BaseController { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java index de80740d4e..ab8fc2cbb0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java @@ -38,6 +38,7 @@ import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService; +import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ReleaseState; @@ -76,11 +77,10 @@ import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import springfox.documentation.annotations.ApiIgnore; - /** * process definition controller */ -@Api(tags = "PROCESS_DEFINITION_TAG", position = 2) +@Api(tags = "PROCESS_DEFINITION_TAG") @RestController 
@RequestMapping("projects/{projectName}/process") public class ProcessDefinitionController extends BaseController { @@ -154,10 +154,10 @@ public class ProcessDefinitionController extends BaseController { @RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds, @RequestParam(value = "targetProjectId", required = true) int targetProjectId) { logger.info("batch copy process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}", - StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), - StringUtils.replaceNRTtoUnderline(projectName), - StringUtils.replaceNRTtoUnderline(processDefinitionIds), - StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId))); + RegexUtils.escapeNRT(loginUser.getUserName()), + RegexUtils.escapeNRT(projectName), + RegexUtils.escapeNRT(processDefinitionIds), + RegexUtils.escapeNRT(String.valueOf(targetProjectId))); return returnDataList( processDefinitionService.batchCopyProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId)); @@ -185,10 +185,10 @@ public class ProcessDefinitionController extends BaseController { @RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds, @RequestParam(value = "targetProjectId", required = true) int targetProjectId) { logger.info("batch move process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}", - StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), - StringUtils.replaceNRTtoUnderline(projectName), - StringUtils.replaceNRTtoUnderline(processDefinitionIds), - StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId))); + RegexUtils.escapeNRT(loginUser.getUserName()), + RegexUtils.escapeNRT(projectName), + RegexUtils.escapeNRT(processDefinitionIds), + RegexUtils.escapeNRT(String.valueOf(targetProjectId))); return returnDataList( processDefinitionService.batchMoveProcessDefinition(loginUser, projectName, 
processDefinitionIds, targetProjectId)); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java index 1f1ec1ed7b..ed0a98e9b2 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java @@ -14,8 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROCESS_INSTANCE_ERROR; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ProcessInstanceService; @@ -27,13 +39,6 @@ 
import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; import java.io.IOException; import java.text.ParseException; @@ -42,12 +47,30 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.apache.dolphinscheduler.api.enums.Status.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; +import springfox.documentation.annotations.ApiIgnore; /** * process instance controller */ -@Api(tags = "PROCESS_INSTANCE_TAG", position = 10) +@Api(tags = "PROCESS_INSTANCE_TAG") @RestController @RequestMapping("projects/{projectName}/instance") public class ProcessInstanceController extends BaseController { diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java index 1bf5003946..1d45058ca7 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java @@ -14,37 +14,53 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROJECT_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROJECT_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.IMPORT_PROCESS_DEFINE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_AUTHORIZED_PROJECT; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROJECT_DETAILS_BY_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_UNAUTHORIZED_PROJECT_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROJECT_ERROR; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; import 
org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; -import static org.apache.dolphinscheduler.api.enums.Status.*; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** * project controller */ -@Api(tags = "PROJECT_TAG", position = 1) +@Api(tags = "PROJECT_TAG") @RestController @RequestMapping("projects") public class ProjectController extends BaseController { @@ -143,8 +159,8 @@ public class ProjectController extends BaseController { @ApiOperation(value = "queryProjectListPaging", notes = "QUERY_PROJECT_LIST_PAGING_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"), - @ApiImplicitParam(name = "projectId", value = "PAGE_SIZE", dataType = "Int", example = "20"), - @ApiImplicitParam(name = "projectId", value = "PAGE_NO", dataType = "Int", example = "1") + 
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "20"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "1") }) @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) @@ -239,8 +255,8 @@ public class ProjectController extends BaseController { @ApiException(QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR) public Result queryProjectCreatedAndAuthorizedByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user {}, query authorized and user created project by user id: {}.", - StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), - StringUtils.replaceNRTtoUnderline(String.valueOf(loginUser.getId()))); + RegexUtils.escapeNRT(loginUser.getUserName()), + RegexUtils.escapeNRT(String.valueOf(loginUser.getId()))); Map result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser); return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java index cf62d1340b..7c196ba3fb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java @@ -14,8 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_QUEUE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_QUEUE_LIST_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_QUEUE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_QUEUE_ERROR; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; @@ -24,26 +29,31 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static org.apache.dolphinscheduler.api.enums.Status.*; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** * queue 
controller */ -@Api(tags = "QUEUE_TAG", position = 1) +@Api(tags = "QUEUE_TAG") @RestController @RequestMapping("/queue") public class QueueController extends BaseController { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java index 52fd023c35..68904b51f2 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java @@ -82,7 +82,7 @@ import springfox.documentation.annotations.ApiIgnore; /** * resources controller */ -@Api(tags = "RESOURCES_TAG", position = 1) +@Api(tags = "RESOURCES_TAG") @RestController @RequestMapping("resources") public class ResourcesController extends BaseController { @@ -322,9 +322,7 @@ public class ResourcesController extends BaseController { @RequestParam(value = "programType",required = false) ProgramType programType ) { String programTypeName = programType == null ? 
"" : programType.name(); - String userName = loginUser.getUserName(); - userName = userName.replaceAll("[\n|\r|\t]", "_"); - logger.info("query resource list, login user:{}, resource type:{}, program type:{}", userName,programTypeName); + logger.info("query resource list, resource type:{}, program type:{}", type, programTypeName); Map result = resourceService.queryResourceByProgramType(loginUser, type,programType); return returnDataList(result); } @@ -641,9 +639,7 @@ public class ResourcesController extends BaseController { @ApiException(QUERY_DATASOURCE_BY_TYPE_ERROR) public Result queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("type") UdfType type) { - String userName = loginUser.getUserName(); - userName = userName.replaceAll("[\n|\r|\t]", "_"); - logger.info("query udf func list, user:{}, type:{}", userName, type); + logger.info("query udf func list, type:{}", type); Map result = udfFuncService.queryUdfFuncList(loginUser, type.ordinal()); return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java index ca57ad11a4..c3492f7993 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java @@ -29,13 +29,13 @@ import static org.apache.dolphinscheduler.common.Constants.SESSION_USER; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.SchedulerService; +import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import 
org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; @@ -61,9 +61,9 @@ import io.swagger.annotations.ApiParam; import springfox.documentation.annotations.ApiIgnore; /** - * schedule controller + * scheduler controller */ -@Api(tags = "SCHEDULER_TAG", position = 13) +@Api(tags = "SCHEDULER_TAG") @RestController @RequestMapping("/projects/{projectName}/schedule") public class SchedulerController extends BaseController { @@ -116,7 +116,7 @@ public class SchedulerController extends BaseController { @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) { logger.info("login user {},project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," + "failure policy: {},processInstancePriority : {}, workGroupId:{}", - StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), StringUtils.replaceNRTtoUnderline(projectName), processDefinitionId, schedule, warningType, warningGroupId, + RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(projectName), processDefinitionId, schedule, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); Map result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); @@ -161,7 +161,7 @@ public class SchedulerController extends BaseController { @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { logger.info("login user {},project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " + "failure policy: 
{},processInstancePriority : {},workerGroupId:{}", - StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), StringUtils.replaceNRTtoUnderline(projectName), id, schedule, warningType, warningGroupId, failureStrategy, + RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(projectName), id, schedule, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); Map result = schedulerService.updateSchedule(loginUser, projectName, id, schedule, diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java index a07478315a..1d7459c1b0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java @@ -22,11 +22,11 @@ import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_LIST_PAGIN import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TaskInstanceService; +import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; @@ -54,7 +54,7 @@ import springfox.documentation.annotations.ApiIgnore; /** * task instance controller */ -@Api(tags = "TASK_INSTANCE_TAG", position = 11) +@Api(tags = "TASK_INSTANCE_TAG") @RestController @RequestMapping("/projects/{projectName}/task-instance") public class TaskInstanceController extends BaseController { @@ -64,7 +64,6 @@ public class 
TaskInstanceController extends BaseController { @Autowired TaskInstanceService taskInstanceService; - /** * query task list paging * @@ -113,16 +112,16 @@ public class TaskInstanceController extends BaseController { @RequestParam("pageSize") Integer pageSize) { logger.info("query task instance list, projectName:{}, processInstanceId:{}, processInstanceName:{}, search value:{}, taskName:{}, executorName: {}, stateType:{}, host:{}, start:{}, end:{}", - StringUtils.replaceNRTtoUnderline(projectName), + RegexUtils.escapeNRT(projectName), processInstanceId, - StringUtils.replaceNRTtoUnderline(processInstanceName), - StringUtils.replaceNRTtoUnderline(searchVal), - StringUtils.replaceNRTtoUnderline(taskName), - StringUtils.replaceNRTtoUnderline(executorName), + RegexUtils.escapeNRT(processInstanceName), + RegexUtils.escapeNRT(searchVal), + RegexUtils.escapeNRT(taskName), + RegexUtils.escapeNRT(executorName), stateType, - StringUtils.replaceNRTtoUnderline(host), - StringUtils.replaceNRTtoUnderline(startTime), - StringUtils.replaceNRTtoUnderline(endTime)); + RegexUtils.escapeNRT(host), + RegexUtils.escapeNRT(startTime), + RegexUtils.escapeNRT(endTime)); searchVal = ParameterUtils.handleEscapes(searchVal); Map result = taskInstanceService.queryTaskListPaging( loginUser, projectName, processInstanceId, processInstanceName, taskName, executorName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java index 4ff769dce6..bf58e09d78 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java @@ -14,8 +14,10 @@ * See the License for the specific language governing permissions and * 
limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TaskRecordService; @@ -23,20 +25,23 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.entity.User; +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; import springfox.documentation.annotations.ApiIgnore; -import java.util.Map; - -import static org.apache.dolphinscheduler.api.enums.Status.*; - /** - * data quality controller + * task record controller */ @ApiIgnore @RestController diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java index bbd9b1d7b5..41eff97e0c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java @@ -27,9 +27,9 @@ import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_OS_TENANT_CODE import org.apache.dolphinscheduler.api.enums.Status; import 
org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TenantService; +import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; @@ -53,18 +53,16 @@ import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; import springfox.documentation.annotations.ApiIgnore; - /** * tenant controller */ -@Api(tags = "TENANT_TAG", position = 1) +@Api(tags = "TENANT_TAG") @RestController @RequestMapping("/tenant") public class TenantController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(TenantController.class); - @Autowired private TenantService tenantService; @@ -91,15 +89,13 @@ public class TenantController extends BaseController { @RequestParam(value = "tenantCode") String tenantCode, @RequestParam(value = "queueId") int queueId, @RequestParam(value = "description", required = false) String description) throws Exception { - String userReplace = StringUtils.replaceNRTtoUnderline(loginUser.getUserName()); - String tenantCodeReplace = StringUtils.replaceNRTtoUnderline(tenantCode); - String descReplace = StringUtils.replaceNRTtoUnderline(description); - logger.info("login user {}, create tenant, tenantCode: {}, queueId: {}, desc: {}", userReplace, tenantCodeReplace, queueId, descReplace); + logger.info("login user {}, create tenant, tenantCode: {}, queueId: {}, desc: {}", + RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(tenantCode), + queueId, RegexUtils.escapeNRT(description)); Map result = tenantService.createTenant(loginUser, tenantCode, queueId, description); return returnDataList(result); } - /** * query tenant list paging * @@ -177,9 +173,9 @@ public 
class TenantController extends BaseController { @RequestParam(value = "tenantCode") String tenantCode, @RequestParam(value = "queueId") int queueId, @RequestParam(value = "description", required = false) String description) throws Exception { - String userReplace = StringUtils.replaceNRTtoUnderline(loginUser.getUserName()); - String tenantCodeReplace = StringUtils.replaceNRTtoUnderline(tenantCode); - String descReplace = StringUtils.replaceNRTtoUnderline(description); + String userReplace = RegexUtils.escapeNRT(loginUser.getUserName()); + String tenantCodeReplace = RegexUtils.escapeNRT(tenantCode); + String descReplace = RegexUtils.escapeNRT(description); logger.info("login user {}, create tenant, tenantCode: {}, queueId: {}, desc: {}", userReplace, tenantCodeReplace, queueId, descReplace); Map result = tenantService.updateTenant(loginUser, id, tenantCode, queueId, description); return returnDataList(result); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UiPluginController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UiPluginController.java index 3589bdcdc0..f0d262d29f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UiPluginController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UiPluginController.java @@ -46,12 +46,12 @@ import io.swagger.annotations.ApiOperation; import springfox.documentation.annotations.ApiIgnore; /** - * UiPluginController + * ui plugin controller * Some plugins (such as alert plugin) need to provide UI interfaces to users. * We use from-creat to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams. * From-create can generate dynamic ui based on this parameter. 
*/ -@Api(tags = "UI_PLUGINS", position = 1) +@Api(tags = "UI_PLUGINS_TAG") @RestController @RequestMapping("ui-plugins") public class UiPluginController extends BaseController { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java index b63e201e60..853e301efd 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java @@ -14,8 +14,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZED_USER_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_USER_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_USER_BY_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.GET_USER_INFO_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.GRANT_DATASOURCE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.GRANT_PROJECT_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.GRANT_RESOURCE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.GRANT_UDF_FUNCTION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_USER_LIST_PAGING_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UNAUTHORIZED_USER_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_USER_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.USER_LIST_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_USERNAME_ERROR; import org.apache.dolphinscheduler.api.enums.Status; import 
org.apache.dolphinscheduler.api.exceptions.ApiException; @@ -24,28 +38,34 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import static org.apache.dolphinscheduler.api.enums.Status.*; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** - * user controller + * users controller */ -@Api(tags = "USERS_TAG", position = 14) +@Api(tags = "USERS_TAG") @RestController @RequestMapping("/users") public class UsersController extends 
BaseController { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java index 8468a65cdb..a92769263a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java @@ -14,9 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; -import io.swagger.annotations.ApiParam; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_WORKFLOW_LINEAGE_ERROR; +import static org.apache.dolphinscheduler.common.Constants.SESSION_USER; import org.apache.dolphinscheduler.api.service.WorkFlowLineageService; import org.apache.dolphinscheduler.api.utils.Result; @@ -24,21 +26,31 @@ import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import static org.apache.dolphinscheduler.api.enums.Status.QUERY_WORKFLOW_LINEAGE_ERROR; -import static org.apache.dolphinscheduler.common.Constants.SESSION_USER; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.GetMapping; +import 
org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiParam; +import springfox.documentation.annotations.ApiIgnore; + +/** + * work flow lineage controller + */ +@Api(tags = "WORK_FLOW_LINEAGE_TAG") @RestController @RequestMapping("lineages/{projectId}") public class WorkFlowLineageController extends BaseController { @@ -47,7 +59,7 @@ public class WorkFlowLineageController extends BaseController { @Autowired private WorkFlowLineageService workFlowLineageService; - @GetMapping(value="/list-name") + @GetMapping(value = "/list-name") @ResponseStatus(HttpStatus.OK) public Result> queryWorkFlowLineageByName(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectId", value = "PROJECT_ID", required = true, example = "1") @PathVariable int projectId, @@ -56,32 +68,30 @@ public class WorkFlowLineageController extends BaseController { searchVal = ParameterUtils.handleEscapes(searchVal); Map result = workFlowLineageService.queryWorkFlowLineageByName(searchVal,projectId); return returnDataList(result); - } catch (Exception e){ + } catch (Exception e) { logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(),e); return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg()); } } - @GetMapping(value="/list-ids") + @GetMapping(value = "/list-ids") @ResponseStatus(HttpStatus.OK) public Result> queryWorkFlowLineageByIds(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectId", value = "PROJECT_ID", required = true, example = "1") @PathVariable int projectId, @ApiIgnore @RequestParam(value = "ids", 
required = false) String ids) { - try { ids = ParameterUtils.handleEscapes(ids); Set idsSet = new HashSet<>(); - if(ids != null) { + if (ids != null) { String[] idsStr = ids.split(","); - for (String id : idsStr) - { + for (String id : idsStr) { idsSet.add(Integer.parseInt(id)); } } Map result = workFlowLineageService.queryWorkFlowLineageByIds(idsSet, projectId); return returnDataList(result); - } catch (Exception e){ + } catch (Exception e) { logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(),e); return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg()); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java index 70b3aecb4f..00418ab64e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java @@ -14,8 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_WORKER_GROUP_FAIL; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.WorkerGroupService; @@ -23,25 +25,30 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; -import java.util.Map; - -import static org.apache.dolphinscheduler.api.enums.Status.*; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** * worker group controller */ -@Api(tags = "WORKER_GROUP_TAG", position = 1) +@Api(tags = "WORKER_GROUP_TAG") @RestController @RequestMapping("/worker-group") public class WorkerGroupController extends BaseController { @@ -51,9 +58,6 @@ public class WorkerGroupController extends BaseController { 
@Autowired WorkerGroupService workerGroupService; - - - /** * query worker groups paging * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java index b1c320566f..eb5b150f42 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.dao.entity.User; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java index e1e63a623c..71e09cab6d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java @@ -17,55 +17,21 @@ package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.AlertType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.AlertGroup; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; -import java.util.Date; -import java.util.HashMap; -import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import 
org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; - /** * alert group service */ -@Service -public class AlertGroupService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(AlertGroupService.class); - - @Autowired - private AlertGroupMapper alertGroupMapper; +public interface AlertGroupService { /** * query alert group list * * @return alert group list */ - public HashMap queryAlertgroup() { - - HashMap result = new HashMap<>(); - List alertGroups = alertGroupMapper.queryAllGroupList(); - result.put(Constants.DATA_LIST, alertGroups); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryAlertgroup(); /** * paging query alarm group list @@ -76,24 +42,7 @@ public class AlertGroupService extends BaseService { * @param pageSize page size * @return alert group list page */ - public Map listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - - Map result = new HashMap<>(); - if (isNotAdmin(loginUser, result)) { - return result; - } - - Page page = new Page(pageNo, pageSize); - IPage alertGroupIPage = alertGroupMapper.queryAlertGroupPage( - page, searchVal); - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotalCount((int) alertGroupIPage.getTotal()); - pageInfo.setLists(alertGroupIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } + Map listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize); /** * create alert group @@ -104,33 +53,7 @@ public class AlertGroupService extends BaseService { * @param alertInstanceIds alertInstanceIds * @return create result code */ - public Map createAlertgroup(User loginUser, String groupName, String desc, 
String alertInstanceIds) { - Map result = new HashMap<>(); - //only admin can operate - if (isNotAdmin(loginUser, result)) { - return result; - } - - AlertGroup alertGroup = new AlertGroup(); - Date now = new Date(); - - alertGroup.setGroupName(groupName); - alertGroup.setAlertInstanceIds(alertInstanceIds); - alertGroup.setDescription(desc); - alertGroup.setCreateTime(now); - alertGroup.setUpdateTime(now); - alertGroup.setCreateUserId(loginUser.getId()); - - // insert - int insert = alertGroupMapper.insert(alertGroup); - - if (insert > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.CREATE_ALERT_GROUP_ERROR); - } - return result; - } + Map createAlertgroup(User loginUser, String groupName, String desc, String alertInstanceIds); /** * updateProcessInstance alert group @@ -142,35 +65,7 @@ public class AlertGroupService extends BaseService { * @param alertInstanceIds alertInstanceIds * @return update result code */ - public Map updateAlertgroup(User loginUser, int id, String groupName, String desc, String alertInstanceIds) { - Map result = new HashMap<>(); - - if (isNotAdmin(loginUser, result)) { - return result; - } - - AlertGroup alertGroup = alertGroupMapper.selectById(id); - - if (alertGroup == null) { - putMsg(result, Status.ALERT_GROUP_NOT_EXIST); - return result; - - } - - Date now = new Date(); - - if (StringUtils.isNotEmpty(groupName)) { - alertGroup.setGroupName(groupName); - } - alertGroup.setDescription(desc); - alertGroup.setUpdateTime(now); - alertGroup.setCreateUserId(loginUser.getId()); - alertGroup.setAlertInstanceIds(alertInstanceIds); - // updateProcessInstance - alertGroupMapper.updateById(alertGroup); - putMsg(result, Status.SUCCESS); - return result; - } + Map updateAlertgroup(User loginUser, int id, String groupName, String desc, String alertInstanceIds); /** * delete alert group by id @@ -179,25 +74,7 @@ public class AlertGroupService extends BaseService { * @param id alert group id * @return delete result code */ - 
@Transactional(rollbackFor = RuntimeException.class) - public Map delAlertgroupById(User loginUser, int id) { - Map result = new HashMap<>(); - result.put(Constants.STATUS, false); - - //only admin can operate - if (isNotAdmin(loginUser, result)) { - return result; - } - //check exist - AlertGroup alertGroup = alertGroupMapper.selectById(id); - if (alertGroup == null) { - putMsg(result, Status.ALERT_GROUP_NOT_EXIST); - return result; - } - alertGroupMapper.deleteById(id); - putMsg(result, Status.SUCCESS); - return result; - } + Map delAlertgroupById(User loginUser, int id); /** * verify group name exists @@ -205,8 +82,5 @@ public class AlertGroupService extends BaseService { * @param groupName group name * @return check result code */ - public boolean existGroupName(String groupName) { - List alertGroup = alertGroupMapper.queryByGroupName(groupName); - return CollectionUtils.isNotEmpty(alertGroup); - } + boolean existGroupName(String groupName); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java index 55826bf3d2..04ff3c875f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java @@ -14,26 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.service; - -import java.text.MessageFormat; -import java.util.Map; -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; +package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.HadoopUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; +import java.io.IOException; +import java.util.Map; + /** * base service */ -public class BaseService { +public interface BaseService { /** * check admin @@ -41,9 +35,7 @@ public class BaseService { * @param user input user * @return ture if administrator, otherwise return false */ - protected boolean isAdmin(User user) { - return user.getUserType() == UserType.ADMIN_USER; - } + boolean isAdmin(User user); /** * isNotAdmin @@ -52,14 +44,7 @@ public class BaseService { * @param result result code * @return true if not administrator, otherwise false */ - protected boolean isNotAdmin(User loginUser, Map result) { - //only admin can operate - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return true; - } - return false; - } + boolean isNotAdmin(User loginUser, Map result); /** * put message to map @@ -68,14 +53,7 @@ public class BaseService { * @param status status * @param statusParams status message */ - protected void putMsg(Map result, Status status, Object... statusParams) { - result.put(Constants.STATUS, status); - if (statusParams != null && statusParams.length > 0) { - result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); - } else { - result.put(Constants.MSG, status.getMsg()); - } - } + void putMsg(Map result, Status status, Object... 
statusParams); /** * put message to result object @@ -84,16 +62,7 @@ public class BaseService { * @param status status * @param statusParams status message */ - protected void putMsg(Result result, Status status, Object... statusParams) { - result.setCode(status.getCode()); - - if (statusParams != null && statusParams.length > 0) { - result.setMsg(MessageFormat.format(status.getMsg(), statusParams)); - } else { - result.setMsg(status.getMsg()); - } - - } + void putMsg(Result result, Status status, Object... statusParams); /** * check @@ -103,34 +72,21 @@ public class BaseService { * @param userNoOperationPerm status * @return check result */ - protected boolean check(Map result, boolean bool, Status userNoOperationPerm) { - //only admin can operate - if (bool) { - result.put(Constants.STATUS, userNoOperationPerm); - result.put(Constants.MSG, userNoOperationPerm.getMsg()); - return true; - } - return false; - } + boolean check(Map result, boolean bool, Status userNoOperationPerm); /** * create tenant dir if not exists * * @param tenantCode tenant code - * @throws Exception if hdfs operation exception + * @throws IOException if hdfs operation exception */ - protected void createTenantDirIfNotExists(String tenantCode) throws Exception { - - String resourcePath = HadoopUtils.getHdfsResDir(tenantCode); - String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); - /** - * init resource path and udf path - */ - HadoopUtils.getInstance().mkdir(resourcePath); - HadoopUtils.getInstance().mkdir(udfsPath); - } + void createTenantDirIfNotExists(String tenantCode) throws IOException; - protected boolean hasPerm(User operateUser, int createUserId) { - return operateUser.getId() == createUserId || isAdmin(operateUser); - } + /** + * has perm + * + * @param operateUser operate user + * @param createUserId create user id + */ + boolean hasPerm(User operateUser, int createUserId); } diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java index 70fb272bea..b8b6be58ff 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java @@ -14,8 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.api.service; +package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.dao.entity.User; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java index 2ca9cbea6a..fbaaf4cbfa 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java @@ -17,67 +17,17 @@ package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbConnectType; import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; -import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; -import org.apache.dolphinscheduler.dao.datasource.OracleDataSource; -import 
org.apache.dolphinscheduler.dao.entity.DataSource; -import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; -import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; -import java.sql.Connection; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; -import java.util.Set; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import com.fasterxml.jackson.databind.node.ObjectNode; /** - * datasource service + * data source service */ -@Service -public class DataSourceService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(DataSourceService.class); - - public static final String NAME = "name"; - public static final String NOTE = "note"; - public static final String TYPE = "type"; - public static final String HOST = "host"; - public static final String PORT = "port"; - public static final String PRINCIPAL = "principal"; - public static final String DATABASE = "database"; - public static final String USER_NAME = "userName"; - public static final String OTHER = "other"; - - @Autowired - private DataSourceMapper dataSourceMapper; - - @Autowired - private DataSourceUserMapper datasourceUserMapper; +public interface DataSourceService { /** * create data source @@ -89,37 +39,7 @@ public class DataSourceService extends BaseService { * @param parameter datasource parameters * @return create result code */ - public Result createDataSource(User loginUser, String name, String desc, 
DbType type, String parameter) { - - Result result = new Result<>(); - // check name can use or not - if (checkName(name)) { - putMsg(result, Status.DATASOURCE_EXIST); - return result; - } - Result isConnection = checkConnection(type, parameter); - if (Status.SUCCESS.getCode() != isConnection.getCode()) { - return result; - } - - // build datasource - DataSource dataSource = new DataSource(); - Date now = new Date(); - - dataSource.setName(name.trim()); - dataSource.setNote(desc); - dataSource.setUserId(loginUser.getId()); - dataSource.setUserName(loginUser.getUserName()); - dataSource.setType(type); - dataSource.setConnectionParams(parameter); - dataSource.setCreateTime(now); - dataSource.setUpdateTime(now); - dataSourceMapper.insert(dataSource); - - putMsg(result, Status.SUCCESS); - - return result; - } + Result createDataSource(User loginUser, String name, String desc, DbType type, String parameter); /** * updateProcessInstance datasource @@ -132,59 +52,7 @@ public class DataSourceService extends BaseService { * @param id data source id * @return update result code */ - public Result updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) { - - Result result = new Result<>(); - // determine whether the data source exists - DataSource dataSource = dataSourceMapper.selectById(id); - if (dataSource == null) { - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - if (!hasPerm(loginUser, dataSource.getUserId())) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - //check name can use or not - if (!name.trim().equals(dataSource.getName()) && checkName(name)) { - putMsg(result, Status.DATASOURCE_EXIST); - return result; - } - //check password,if the password is not updated, set to the old password. 
- ObjectNode paramObject = JSONUtils.parseObject(parameter); - String password = paramObject.path(Constants.PASSWORD).asText(); - if (StringUtils.isBlank(password)) { - String oldConnectionParams = dataSource.getConnectionParams(); - ObjectNode oldParams = JSONUtils.parseObject(oldConnectionParams); - paramObject.put(Constants.PASSWORD, oldParams.path(Constants.PASSWORD).asText()); - } - // connectionParams json - String connectionParams = paramObject.toString(); - - Result isConnection = checkConnection(type, parameter); - if (Status.SUCCESS.getCode() != isConnection.getCode()) { - return result; - } - - Date now = new Date(); - - dataSource.setName(name.trim()); - dataSource.setNote(desc); - dataSource.setUserName(loginUser.getUserName()); - dataSource.setType(type); - dataSource.setConnectionParams(connectionParams); - dataSource.setUpdateTime(now); - dataSourceMapper.updateById(dataSource); - putMsg(result, Status.SUCCESS); - return result; - } - - private boolean checkName(String name) { - List queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim()); - return queryDataSource != null && queryDataSource.size() > 0; - } + Result updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter); /** * updateProcessInstance datasource @@ -192,91 +60,7 @@ public class DataSourceService extends BaseService { * @param id datasource id * @return data source detail */ - public Map queryDataSource(int id) { - - Map result = new HashMap(5); - DataSource dataSource = dataSourceMapper.selectById(id); - if (dataSource == null) { - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - // type - String dataSourceType = dataSource.getType().toString(); - // name - String dataSourceName = dataSource.getName(); - // desc - String desc = dataSource.getNote(); - // parameter - String parameter = dataSource.getConnectionParams(); - - BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), 
parameter); - DbConnectType connectType = null; - String hostSeperator = Constants.DOUBLE_SLASH; - if (DbType.ORACLE.equals(dataSource.getType())) { - connectType = ((OracleDataSource) datasourceForm).getConnectType(); - if (DbConnectType.ORACLE_SID.equals(connectType)) { - hostSeperator = Constants.AT_SIGN; - } - } - String database = datasourceForm.getDatabase(); - // jdbc connection params - String other = datasourceForm.getOther(); - String address = datasourceForm.getAddress(); - - String[] hostsPorts = getHostsAndPort(address, hostSeperator); - // ip host - String host = hostsPorts[0]; - // prot - String port = hostsPorts[1]; - String separator = ""; - - switch (dataSource.getType()) { - case HIVE: - case SQLSERVER: - separator = ";"; - break; - case MYSQL: - case POSTGRESQL: - case CLICKHOUSE: - case ORACLE: - case PRESTO: - separator = "&"; - break; - default: - separator = "&"; - break; - } - - Map otherMap = new LinkedHashMap(); - if (other != null) { - String[] configs = other.split(separator); - for (String config : configs) { - otherMap.put(config.split("=")[0], config.split("=")[1]); - } - - } - - Map map = new HashMap<>(10); - map.put(NAME, dataSourceName); - map.put(NOTE, desc); - map.put(TYPE, dataSourceType); - if (connectType != null) { - map.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType); - } - - map.put(HOST, host); - map.put(PORT, port); - map.put(PRINCIPAL, datasourceForm.getPrincipal()); - map.put(Constants.KERBEROS_KRB5_CONF_PATH, datasourceForm.getJavaSecurityKrb5Conf()); - map.put(Constants.KERBEROS_KEY_TAB_USERNAME, datasourceForm.getLoginUserKeytabUsername()); - map.put(Constants.KERBEROS_KEY_TAB_PATH, datasourceForm.getLoginUserKeytabPath()); - map.put(DATABASE, database); - map.put(USER_NAME, datasourceForm.getUser()); - map.put(OTHER, otherMap); - result.put(Constants.DATA_LIST, map); - putMsg(result, Status.SUCCESS); - return result; - } + Map queryDataSource(int id); /** * query datasource list by keyword @@ -287,44 +71,7 @@ 
public class DataSourceService extends BaseService { * @param pageSize page size * @return data source list page */ - public Map queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(); - IPage dataSourceList = null; - Page dataSourcePage = new Page(pageNo, pageSize); - - if (isAdmin(loginUser)) { - dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal); - } else { - dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal); - } - - List dataSources = dataSourceList != null ? dataSourceList.getRecords() : new ArrayList<>(); - handlePasswd(dataSources); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - pageInfo.setTotalCount((int) (dataSourceList != null ? dataSourceList.getTotal() : 0L)); - pageInfo.setLists(dataSources); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * handle datasource connection password for safety - * - * @param dataSourceList - */ - private void handlePasswd(List dataSourceList) { - - for (DataSource dataSource : dataSourceList) { - - String connectionParams = dataSource.getConnectionParams(); - ObjectNode object = JSONUtils.parseObject(connectionParams); - object.put(Constants.PASSWORD, Constants.XXXXXX); - dataSource.setConnectionParams(object.toString()); - - } - } + Map queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize); /** * query data resource list @@ -333,22 +80,7 @@ public class DataSourceService extends BaseService { * @param type data source type * @return data source list page */ - public Map queryDataSourceList(User loginUser, Integer type) { - Map result = new HashMap<>(); - - List datasourceList; - - if (isAdmin(loginUser)) { - datasourceList = dataSourceMapper.listAllDataSourceByType(type); - } else { - datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type); - } - 
- result.put(Constants.DATA_LIST, datasourceList); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryDataSourceList(User loginUser, Integer type); /** * verify datasource exists @@ -356,18 +88,7 @@ public class DataSourceService extends BaseService { * @param name datasource name * @return true if data datasource not exists, otherwise return false */ - public Result verifyDataSourceName(String name) { - Result result = new Result<>(); - List dataSourceList = dataSourceMapper.queryDataSourceByName(name); - if (dataSourceList != null && dataSourceList.size() > 0) { - logger.error("datasource name:{} has exist, can't create again.", name); - putMsg(result, Status.DATASOURCE_EXIST); - } else { - putMsg(result, Status.SUCCESS); - } - - return result; - } + Result verifyDataSourceName(String name); /** * check connection @@ -376,25 +97,7 @@ public class DataSourceService extends BaseService { * @param parameter data source parameters * @return true if connect successfully, otherwise false */ - public Result checkConnection(DbType type, String parameter) { - Result result = new Result<>(); - BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter); - if (datasource == null) { - putMsg(result, Status.DATASOURCE_TYPE_NOT_EXIST, type); - return result; - } - try (Connection connection = datasource.getConnection()) { - if (connection == null) { - putMsg(result, Status.CONNECTION_TEST_FAILURE); - return result; - } - putMsg(result, Status.SUCCESS); - return result; - } catch (Exception e) { - logger.error("datasource test connection error, dbType:{}, jdbcUrl:{}, message:{}.", type, datasource.getJdbcUrl(), e.getMessage()); - return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(),e.getMessage()); - } - } + Result checkConnection(DbType type, String parameter); /** * test connection @@ -402,15 +105,7 @@ public class DataSourceService extends BaseService { * @param id datasource id * @return connect result code */ - public Result 
connectionTest(int id) { - DataSource dataSource = dataSourceMapper.selectById(id); - if (dataSource == null) { - Result result = new Result<>(); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - return checkConnection(dataSource.getType(), dataSource.getConnectionParams()); - } + Result connectionTest(int id); /** * build paramters @@ -425,116 +120,10 @@ public class DataSourceService extends BaseService { * @param principal principal * @return datasource parameter */ - public String buildParameter(DbType type, String host, - String port, String database, String principal, String userName, - String password, DbConnectType connectType, String other, - String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) { - - String address = buildAddress(type, host, port, connectType); - Map parameterMap = new LinkedHashMap(6); - String jdbcUrl; - if (DbType.SQLSERVER == type) { - jdbcUrl = address + ";databaseName=" + database; - } else { - jdbcUrl = address + "/" + database; - } - - if (Constants.ORACLE.equals(type.name())) { - parameterMap.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType); - } - - if (CommonUtils.getKerberosStartupState() - && (type == DbType.HIVE || type == DbType.SPARK)) { - jdbcUrl += ";principal=" + principal; - } - - String separator = ""; - if (Constants.MYSQL.equals(type.name()) - || Constants.POSTGRESQL.equals(type.name()) - || Constants.CLICKHOUSE.equals(type.name()) - || Constants.ORACLE.equals(type.name()) - || Constants.PRESTO.equals(type.name())) { - separator = "&"; - } else if (Constants.HIVE.equals(type.name()) - || Constants.SPARK.equals(type.name()) - || Constants.DB2.equals(type.name()) - || Constants.SQLSERVER.equals(type.name())) { - separator = ";"; - } - - parameterMap.put(TYPE, connectType); - parameterMap.put(Constants.ADDRESS, address); - parameterMap.put(Constants.DATABASE, database); - parameterMap.put(Constants.JDBC_URL, jdbcUrl); - parameterMap.put(Constants.USER, userName); - 
parameterMap.put(Constants.PASSWORD, CommonUtils.encodePassword(password)); - if (CommonUtils.getKerberosStartupState() - && (type == DbType.HIVE || type == DbType.SPARK)) { - parameterMap.put(Constants.PRINCIPAL, principal); - parameterMap.put(Constants.KERBEROS_KRB5_CONF_PATH, javaSecurityKrb5Conf); - parameterMap.put(Constants.KERBEROS_KEY_TAB_USERNAME, loginUserKeytabUsername); - parameterMap.put(Constants.KERBEROS_KEY_TAB_PATH, loginUserKeytabPath); - } - - Map map = JSONUtils.toMap(other); - if (map != null) { - StringBuilder otherSb = new StringBuilder(); - for (Map.Entry entry: map.entrySet()) { - otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator)); - } - if (!Constants.DB2.equals(type.name())) { - otherSb.deleteCharAt(otherSb.length() - 1); - } - parameterMap.put(Constants.OTHER, otherSb); - } - - if (logger.isDebugEnabled()) { - logger.info("parameters map:{}", JSONUtils.toJsonString(parameterMap)); - } - return JSONUtils.toJsonString(parameterMap); - - } - - private String buildAddress(DbType type, String host, String port, DbConnectType connectType) { - StringBuilder sb = new StringBuilder(); - if (Constants.MYSQL.equals(type.name())) { - sb.append(Constants.JDBC_MYSQL); - sb.append(host).append(":").append(port); - } else if (Constants.POSTGRESQL.equals(type.name())) { - sb.append(Constants.JDBC_POSTGRESQL); - sb.append(host).append(":").append(port); - } else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) { - sb.append(Constants.JDBC_HIVE_2); - String[] hostArray = host.split(","); - if (hostArray.length > 0) { - for (String zkHost : hostArray) { - sb.append(String.format("%s:%s,", zkHost, port)); - } - sb.deleteCharAt(sb.length() - 1); - } - } else if (Constants.CLICKHOUSE.equals(type.name())) { - sb.append(Constants.JDBC_CLICKHOUSE); - sb.append(host).append(":").append(port); - } else if (Constants.ORACLE.equals(type.name())) { - if (connectType == DbConnectType.ORACLE_SID) { - 
sb.append(Constants.JDBC_ORACLE_SID); - } else { - sb.append(Constants.JDBC_ORACLE_SERVICE_NAME); - } - sb.append(host).append(":").append(port); - } else if (Constants.SQLSERVER.equals(type.name())) { - sb.append(Constants.JDBC_SQLSERVER); - sb.append(host).append(":").append(port); - } else if (Constants.DB2.equals(type.name())) { - sb.append(Constants.JDBC_DB2); - sb.append(host).append(":").append(port); - } else if (Constants.PRESTO.equals(type.name())) { - sb.append(Constants.JDBC_PRESTO); - sb.append(host).append(":").append(port); - } - - return sb.toString(); - } + String buildParameter(DbType type, String host, + String port, String database, String principal, String userName, + String password, DbConnectType connectType, String other, + String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath); /** * delete datasource @@ -543,30 +132,7 @@ public class DataSourceService extends BaseService { * @param datasourceId data source id * @return delete result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Result delete(User loginUser, int datasourceId) { - Result result = new Result<>(); - try { - //query datasource by id - DataSource dataSource = dataSourceMapper.selectById(datasourceId); - if (dataSource == null) { - logger.error("resource id {} not exist", datasourceId); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - if (!hasPerm(loginUser, dataSource.getUserId())) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - dataSourceMapper.deleteById(datasourceId); - datasourceUserMapper.deleteByDatasourceId(datasourceId); - putMsg(result, Status.SUCCESS); - } catch (Exception e) { - logger.error("delete datasource error", e); - throw new RuntimeException("delete datasource error"); - } - return result; - } + Result delete(User loginUser, int datasourceId); /** * unauthorized datasource @@ -575,38 +141,7 @@ public class DataSourceService extends BaseService { * @param 
userId user id * @return unauthed data source result code */ - public Map unauthDatasource(User loginUser, Integer userId) { - - Map result = new HashMap<>(); - //only admin operate - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - /** - * query all data sources except userId - */ - List resultList = new ArrayList<>(); - List datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId); - Set datasourceSet = null; - if (datasourceList != null && datasourceList.size() > 0) { - datasourceSet = new HashSet<>(datasourceList); - - List authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId); - - Set authedDataSourceSet = null; - if (authedDataSourceList != null && authedDataSourceList.size() > 0) { - authedDataSourceSet = new HashSet<>(authedDataSourceList); - datasourceSet.removeAll(authedDataSourceSet); - - } - resultList = new ArrayList<>(datasourceSet); - } - result.put(Constants.DATA_LIST, resultList); - putMsg(result, Status.SUCCESS); - return result; - } + Map unauthDatasource(User loginUser, Integer userId); /** * authorized datasource @@ -615,50 +150,5 @@ public class DataSourceService extends BaseService { * @param userId user id * @return authorized result code */ - public Map authedDatasource(User loginUser, Integer userId) { - Map result = new HashMap<>(); - - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - List authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId); - result.put(Constants.DATA_LIST, authedDatasourceList); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * get host and port by address - * - * @param address address - * @return sting array: [host,port] - */ - private String[] getHostsAndPort(String address) { - return getHostsAndPort(address, Constants.DOUBLE_SLASH); - } - - /** - * get host and port by address - * - * @param address address - * @param separator separator - * @return 
sting array: [host,port] - */ - private String[] getHostsAndPort(String address, String separator) { - String[] result = new String[2]; - String[] tmpArray = address.split(separator); - String hostsAndPorts = tmpArray[tmpArray.length - 1]; - StringBuilder hosts = new StringBuilder(); - String[] hostPortArray = hostsAndPorts.split(Constants.COMMA); - String port = hostPortArray[0].split(Constants.COLON)[1]; - for (String hostPort : hostPortArray) { - hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA); - } - hosts.deleteCharAt(hosts.length() - 1); - result[0] = hosts.toString(); - result[1] = port; - return result; - } + Map authedDatasource(User loginUser, Integer userId); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java index 77be0a048c..6bed9790b3 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java @@ -17,82 +17,22 @@ package org.apache.dolphinscheduler.api.service; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS; -import static org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT; - import org.apache.dolphinscheduler.api.enums.ExecuteType; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; -import 
org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; -import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.WarningType; -import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.Schedule; -import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; -import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; -import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; -import java.text.ParseException; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - /** * executor service */ -@Service -public class ExecutorService extends BaseService { - - private static final Logger logger = 
LoggerFactory.getLogger(ExecutorService.class); - - @Autowired - private ProjectMapper projectMapper; - - @Autowired - private ProjectService projectService; - - @Autowired - private ProcessDefinitionMapper processDefinitionMapper; - - @Autowired - private MonitorService monitorService; - - - @Autowired - private ProcessInstanceMapper processInstanceMapper; - - - @Autowired - private ProcessService processService; +public interface ExecutorService { /** * execute process instance @@ -113,80 +53,14 @@ public class ExecutorService extends BaseService { * @param timeout timeout * @param startParams the global param values which pass to new process instance * @return execute process instance code - * @throws ParseException Parse Exception - */ - public Map execProcessInstance(User loginUser, String projectName, - int processDefinitionId, String cronTime, CommandType commandType, - FailureStrategy failureStrategy, String startNodeList, - TaskDependType taskDependType, WarningType warningType, int warningGroupId, - RunMode runMode, - Priority processInstancePriority, String workerGroup, Integer timeout, - Map startParams) throws ParseException { - Map result = new HashMap<>(); - // timeout is invalid - if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { - putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR); - return result; - } - Project project = projectMapper.queryByName(projectName); - Map checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project); - if (checkResultAndAuth != null) { - return checkResultAndAuth; - } - - // check process define release state - ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId); - result = checkProcessDefinitionValid(processDefinition, processDefinitionId); - if (result.get(Constants.STATUS) != Status.SUCCESS) { - return result; - } - - if (!checkTenantSuitable(processDefinition)) { - logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", - 
processDefinition.getId(), processDefinition.getName()); - putMsg(result, Status.TENANT_NOT_SUITABLE); - return result; - } - - // check master exists - if (!checkMasterExists(result)) { - return result; - } - - /** - * create command - */ - int create = this.createCommand(commandType, processDefinitionId, - taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), - warningGroupId, runMode, processInstancePriority, workerGroup, startParams); - - if (create > 0) { - processDefinition.setWarningGroupId(warningGroupId); - processDefinitionMapper.updateById(processDefinition); - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); - } - return result; - } - - /** - * check whether master exists - * - * @param result result - * @return master exists return true , otherwise return false */ - private boolean checkMasterExists(Map result) { - // check master server exists - List masterServers = monitorService.getServerListFromZK(true); - - // no master - if (masterServers.size() == 0) { - putMsg(result, Status.MASTER_NOT_EXISTS); - return false; - } - return true; - } + Map execProcessInstance(User loginUser, String projectName, + int processDefinitionId, String cronTime, CommandType commandType, + FailureStrategy failureStrategy, String startNodeList, + TaskDependType taskDependType, WarningType warningType, int warningGroupId, + RunMode runMode, + Priority processInstancePriority, String workerGroup, Integer timeout, + Map startParams); /** * check whether the process definition can be executed @@ -195,19 +69,7 @@ public class ExecutorService extends BaseService { * @param processDefineId process definition id * @return check result code */ - public Map checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId) { - Map result = new HashMap<>(); - if (processDefinition == null) { - // check process definition exists - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, 
processDefineId); - } else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { - // check process definition online - putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefineId); - } else { - result.put(Constants.STATUS, Status.SUCCESS); - } - return result; - } + Map checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId); /** * do action to process instance:pause, stop, repeat, recover from pause, recover from stop @@ -218,194 +80,7 @@ public class ExecutorService extends BaseService { * @param executeType execute type * @return execute result code */ - public Map execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = checkResultAndAuth(loginUser, projectName, project); - if (checkResult != null) { - return checkResult; - } - - // check master exists - if (!checkMasterExists(result)) { - return result; - } - - ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); - if (processInstance == null) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); - return result; - } - - ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); - if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) { - result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId()); - if (result.get(Constants.STATUS) != Status.SUCCESS) { - return result; - } - } - - checkResult = checkExecuteType(processInstance, executeType); - Status status = (Status) checkResult.get(Constants.STATUS); - if (status != Status.SUCCESS) { - return checkResult; - } - if (!checkTenantSuitable(processDefinition)) { - logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", - processDefinition.getId(), 
processDefinition.getName()); - putMsg(result, Status.TENANT_NOT_SUITABLE); - } - - switch (executeType) { - case REPEAT_RUNNING: - result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING); - break; - case RECOVER_SUSPENDED_PROCESS: - result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS); - break; - case START_FAILURE_TASK_PROCESS: - result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS); - break; - case STOP: - if (processInstance.getState() == ExecutionStatus.READY_STOP) { - putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); - } else { - result = updateProcessInstancePrepare(processInstance, CommandType.STOP, ExecutionStatus.READY_STOP); - } - break; - case PAUSE: - if (processInstance.getState() == ExecutionStatus.READY_PAUSE) { - putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); - } else { - result = updateProcessInstancePrepare(processInstance, CommandType.PAUSE, ExecutionStatus.READY_PAUSE); - } - break; - default: - logger.error("unknown execute type : {}", executeType); - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type"); - - break; - } - return result; - } - - /** - * check tenant suitable - * - * @param processDefinition process definition - * @return true if tenant suitable, otherwise return false - */ - private boolean checkTenantSuitable(ProcessDefinition processDefinition) { - // checkTenantExists(); - Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(), - processDefinition.getUserId()); - return tenant != null; - } - - /** - * Check the state of process instance and the type of operation match - * - * @param processInstance process instance - * @param executeType execute type - * @return check 
result code - */ - private Map checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) { - - Map result = new HashMap<>(); - ExecutionStatus executionStatus = processInstance.getState(); - boolean checkResult = false; - switch (executeType) { - case PAUSE: - case STOP: - if (executionStatus.typeIsRunning()) { - checkResult = true; - } - break; - case REPEAT_RUNNING: - if (executionStatus.typeIsFinished()) { - checkResult = true; - } - break; - case START_FAILURE_TASK_PROCESS: - if (executionStatus.typeIsFailure()) { - checkResult = true; - } - break; - case RECOVER_SUSPENDED_PROCESS: - if (executionStatus.typeIsPause() || executionStatus.typeIsCancel()) { - checkResult = true; - } - break; - default: - break; - } - if (!checkResult) { - putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString()); - } else { - putMsg(result, Status.SUCCESS); - } - return result; - } - - /** - * prepare to update process instance command type and status - * - * @param processInstance process instance - * @param commandType command type - * @param executionStatus execute status - * @return update result - */ - private Map updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) { - Map result = new HashMap<>(); - - processInstance.setCommandType(commandType); - processInstance.addHistoryCmd(commandType); - processInstance.setState(executionStatus); - int update = processService.updateProcessInstance(processInstance); - - // determine whether the process is normal - if (update > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); - } - return result; - } - - /** - * insert command, used in the implementation of the page, re run, recovery (pause / failure) execution - * - * @param loginUser login user - * @param instanceId instance id - * @param processDefinitionId process 
definition id - * @param commandType command type - * @return insert result code - */ - private Map insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) { - Map result = new HashMap<>(); - Command command = new Command(); - command.setCommandType(commandType); - command.setProcessDefinitionId(processDefinitionId); - command.setCommandParam(String.format("{\"%s\":%d}", - CMD_PARAM_RECOVER_PROCESS_ID_STRING, instanceId)); - command.setExecutorId(loginUser.getId()); - - if (!processService.verifyIsNeedCreateCommand(command)) { - putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, processDefinitionId); - return result; - } - - int create = processService.createCommand(command); - - if (create > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); - } - - return result; - } + Map execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType); /** * check if sub processes are offline before starting process definition @@ -413,167 +88,5 @@ public class ExecutorService extends BaseService { * @param processDefineId process definition id * @return check result code */ - public Map startCheckByProcessDefinedId(int processDefineId) { - Map result = new HashMap<>(); - - if (processDefineId == 0) { - logger.error("process definition id is null"); - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "process definition id"); - } - List ids = new ArrayList<>(); - processService.recurseFindSubProcessId(processDefineId, ids); - Integer[] idArray = ids.toArray(new Integer[ids.size()]); - if (!ids.isEmpty()) { - List processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); - if (processDefinitionList != null) { - for (ProcessDefinition processDefinition : processDefinitionList) { - /** - * if there is no online process, exit directly - */ - if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { - 
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); - logger.info("not release process definition id: {} , name : {}", - processDefinition.getId(), processDefinition.getName()); - return result; - } - } - } - } - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * create command - * - * @param commandType commandType - * @param processDefineId processDefineId - * @param nodeDep nodeDep - * @param failureStrategy failureStrategy - * @param startNodeList startNodeList - * @param schedule schedule - * @param warningType warningType - * @param executorId executorId - * @param warningGroupId warningGroupId - * @param runMode runMode - * @param processInstancePriority processInstancePriority - * @param workerGroup workerGroup - * @return command id - */ - private int createCommand(CommandType commandType, int processDefineId, - TaskDependType nodeDep, FailureStrategy failureStrategy, - String startNodeList, String schedule, WarningType warningType, - int executorId, int warningGroupId, - RunMode runMode, Priority processInstancePriority, String workerGroup, - Map startParams) throws ParseException { - - /** - * instantiate command schedule instance - */ - Command command = new Command(); - - Map cmdParam = new HashMap<>(); - if (commandType == null) { - command.setCommandType(CommandType.START_PROCESS); - } else { - command.setCommandType(commandType); - } - command.setProcessDefinitionId(processDefineId); - if (nodeDep != null) { - command.setTaskDependType(nodeDep); - } - if (failureStrategy != null) { - command.setFailureStrategy(failureStrategy); - } - - if (StringUtils.isNotEmpty(startNodeList)) { - cmdParam.put(CMD_PARAM_START_NODE_NAMES, startNodeList); - } - if (warningType != null) { - command.setWarningType(warningType); - } - if (startParams != null && startParams.size() > 0) { - cmdParam.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParams)); - } - command.setCommandParam(JSONUtils.toJsonString(cmdParam)); - 
command.setExecutorId(executorId); - command.setWarningGroupId(warningGroupId); - command.setProcessInstancePriority(processInstancePriority); - command.setWorkerGroup(workerGroup); - - Date start = null; - Date end = null; - if (StringUtils.isNotEmpty(schedule)) { - String[] interval = schedule.split(","); - if (interval.length == 2) { - start = DateUtils.getScheduleDate(interval[0]); - end = DateUtils.getScheduleDate(interval[1]); - } - } - - // determine whether to complement - if (commandType == CommandType.COMPLEMENT_DATA) { - runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode; - if (null != start && null != end && !start.after(end)) { - if (runMode == RunMode.RUN_MODE_SERIAL) { - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end)); - command.setCommandParam(JSONUtils.toJsonString(cmdParam)); - return processService.createCommand(command); - } else if (runMode == RunMode.RUN_MODE_PARALLEL) { - List schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId); - List listDate = new LinkedList<>(); - if (!CollectionUtils.isEmpty(schedules)) { - for (Schedule item : schedules) { - listDate.addAll(CronUtils.getSelfFireDateList(start, end, item.getCrontab())); - } - } - if (!CollectionUtils.isEmpty(listDate)) { - // loop by schedule date - for (Date date : listDate) { - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date)); - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(date)); - command.setCommandParam(JSONUtils.toJsonString(cmdParam)); - processService.createCommand(command); - } - return listDate.size(); - } else { - // loop by day - int runCunt = 0; - while (!start.after(end)) { - runCunt += 1; - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start)); - 
command.setCommandParam(JSONUtils.toJsonString(cmdParam)); - processService.createCommand(command); - start = DateUtils.getSomeDay(start, 1); - } - return runCunt; - } - } - } else { - logger.error("there is not valid schedule date for the process definition: id:{},date:{}", - processDefineId, schedule); - } - } else { - command.setCommandParam(JSONUtils.toJsonString(cmdParam)); - return processService.createCommand(command); - } - - return 0; - } - - /** - * check result and auth - */ - private Map checkResultAndAuth(User loginUser, String projectName, Project project) { - // check project auth - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status status = (Status) checkResult.get(Constants.STATUS); - if (status != Status.SUCCESS) { - return checkResult; - } - return null; - } - + Map startCheckByProcessDefinedId(int processDefineId); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java index 14440ee61e..ef30a40303 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java @@ -14,12 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.utils.Result; /** - * log service + * logger service */ public interface LoggerService { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java index e46ca6fcf2..51cba2ccdc 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java @@ -14,143 +14,51 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; -import static org.apache.dolphinscheduler.common.utils.Preconditions.checkNotNull; +import org.apache.dolphinscheduler.common.model.Server; +import org.apache.dolphinscheduler.dao.entity.User; -import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; - -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.ZookeeperMonitor; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.ZKNodeType; -import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.model.WorkerServerModel; -import org.apache.dolphinscheduler.dao.MonitorDBDao; -import org.apache.dolphinscheduler.dao.entity.MonitorRecord; -import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import com.google.common.collect.Sets; /** * monitor service */ -@Service -public class MonitorService extends BaseService { - - @Autowired - private 
ZookeeperMonitor zookeeperMonitor; - - @Autowired - private MonitorDBDao monitorDBDao; - /** - * query database state - * - * @param loginUser login user - * @return data base state - */ - public Map queryDatabaseState(User loginUser) { - Map result = new HashMap<>(); - - List monitorRecordList = monitorDBDao.queryDatabaseState(); - - result.put(Constants.DATA_LIST, monitorRecordList); - putMsg(result, Status.SUCCESS); - - return result; - - } - - /** - * query master list - * - * @param loginUser login user - * @return master information list - */ - public Map queryMaster(User loginUser) { - - Map result = new HashMap<>(); - - List masterServers = getServerListFromZK(true); - result.put(Constants.DATA_LIST, masterServers); - putMsg(result,Status.SUCCESS); - - return result; - } - - /** - * query zookeeper state - * - * @param loginUser login user - * @return zookeeper information list - */ - public Map queryZookeeperState(User loginUser) { - Map result = new HashMap<>(); - - List zookeeperRecordList = zookeeperMonitor.zookeeperInfoList(); - - result.put(Constants.DATA_LIST, zookeeperRecordList); - putMsg(result, Status.SUCCESS); - - return result; - - } - - - /** - * query worker list - * - * @param loginUser login user - * @return worker information list - */ - public Map queryWorker(User loginUser) { - - Map result = new HashMap<>(); - List workerServers = getServerListFromZK(false) - .stream() - .map((Server server) -> { - WorkerServerModel model = new WorkerServerModel(); - model.setId(server.getId()); - model.setHost(server.getHost()); - model.setPort(server.getPort()); - model.setZkDirectories(Sets.newHashSet(server.getZkDirectory())); - model.setResInfo(server.getResInfo()); - model.setCreateTime(server.getCreateTime()); - model.setLastHeartbeatTime(server.getLastHeartbeatTime()); - return model; - }) - .collect(Collectors.toList()); - - Map workerHostPortServerMapping = workerServers - .stream() - .collect(Collectors.toMap( - (WorkerServerModel worker) -> 
{ - String[] s = worker.getZkDirectories().iterator().next().split("/"); - return s[s.length - 1]; - } - , Function.identity() - , (WorkerServerModel oldOne, WorkerServerModel newOne) -> { - oldOne.getZkDirectories().addAll(newOne.getZkDirectories()); - return oldOne; - })); - - result.put(Constants.DATA_LIST, workerHostPortServerMapping.values()); - putMsg(result,Status.SUCCESS); - - return result; - } - - public List getServerListFromZK(boolean isMaster) { - - checkNotNull(zookeeperMonitor); - ZKNodeType zkNodeType = isMaster ? ZKNodeType.MASTER : ZKNodeType.WORKER; - return zookeeperMonitor.getServersList(zkNodeType); - } - +public interface MonitorService { + + /** + * query database state + * + * @param loginUser login user + * @return data base state + */ + Map queryDatabaseState(User loginUser); + + /** + * query master list + * + * @param loginUser login user + * @return master information list + */ + Map queryMaster(User loginUser); + + /** + * query zookeeper state + * + * @param loginUser login user + * @return zookeeper information list + */ + Map queryZookeeperState(User loginUser); + + /** + * query worker list + * + * @param loginUser login user + * @return worker information list + */ + Map queryWorker(User loginUser); + + List getServerListFromZK(boolean isMaster); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java index 5538194db7..18208ff025 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java @@ -23,6 +23,9 @@ import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; +/** + * process definition version service + */ public interface 
ProcessDefinitionVersionService { /** diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java index 6458a768d8..914eb2dfee 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java @@ -17,157 +17,26 @@ package org.apache.dolphinscheduler.api.service; -import static org.apache.dolphinscheduler.common.Constants.DATA_LIST; -import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT; -import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS; -import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; -import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE; -import static org.apache.dolphinscheduler.common.Constants.TASK_LIST; - -import org.apache.dolphinscheduler.api.dto.gantt.GanttDto; -import org.apache.dolphinscheduler.api.dto.gantt.Task; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DependResult; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.Flag; -import org.apache.dolphinscheduler.common.enums.TaskType; -import org.apache.dolphinscheduler.common.graph.DAG; -import org.apache.dolphinscheduler.common.model.TaskNode; -import org.apache.dolphinscheduler.common.model.TaskNodeRelation; -import org.apache.dolphinscheduler.common.process.ProcessDag; -import org.apache.dolphinscheduler.common.process.Property; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import 
org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessData; -import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; -import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; -import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; -import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; -import org.apache.dolphinscheduler.dao.utils.DagHelper; -import org.apache.dolphinscheduler.service.process.ProcessService; -import java.io.BufferedReader; -import java.io.ByteArrayInputStream; import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; import java.text.ParseException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * process instance service */ -@Service -public 
class ProcessInstanceService extends BaseService { - - - private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceService.class); - - @Autowired - ProjectMapper projectMapper; - - @Autowired - ProjectService projectService; - - @Autowired - ProcessService processService; - - @Autowired - ProcessInstanceMapper processInstanceMapper; - - @Autowired - ProcessDefinitionMapper processDefineMapper; - - @Autowired - ProcessDefinitionService processDefinitionService; - - @Autowired - ProcessDefinitionVersionService processDefinitionVersionService; - - @Autowired - ExecutorService execService; - - @Autowired - TaskInstanceMapper taskInstanceMapper; - - @Autowired - LoggerService loggerService; - - - @Autowired - UsersService usersService; +public interface ProcessInstanceService { /** * return top n SUCCESS process instance order by running time which started between startTime and endTime */ - public Map queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime) { - Map result = new HashMap<>(); - - Project project = projectMapper.queryByName(projectName); - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - if (0 > size) { - putMsg(result, Status.NEGTIVE_SIZE_NUMBER_ERROR, size); - return result; - } - if (Objects.isNull(startTime)) { - putMsg(result, Status.DATA_IS_NULL, Constants.START_TIME); - return result; - } - Date start = DateUtils.stringToDate(startTime); - if (Objects.isNull(endTime)) { - putMsg(result, Status.DATA_IS_NULL, Constants.END_TIME); - return result; - } - Date end = DateUtils.stringToDate(endTime); - if (start == null || end == null) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate"); - return result; - } - if (start.getTime() > end.getTime()) { - putMsg(result, 
Status.START_TIME_BIGGER_THAN_END_TIME_ERROR, startTime, endTime); - return result; - } - - List processInstances = processInstanceMapper.queryTopNProcessInstance(size, start, end, ExecutionStatus.SUCCESS); - result.put(DATA_LIST, processInstances); - putMsg(result, Status.SUCCESS); - return result; - } + Map queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime); /** * query process instance by id @@ -177,24 +46,7 @@ public class ProcessInstanceService extends BaseService { * @param processId process instance id * @return process instance detail */ - public Map queryProcessInstanceById(User loginUser, String projectName, Integer processId) { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); - - ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); - processInstance.setWarningGroupId(processDefinition.getWarningGroupId()); - result.put(DATA_LIST, processInstance); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryProcessInstanceById(User loginUser, String projectName, Integer processId); /** * paging query process instance list, filtering according to project, process definition, time range, keyword, process status @@ -211,64 +63,10 @@ public class ProcessInstanceService extends BaseService { * @param endDate end time * @return process instance list */ - public Map queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId, - String startDate, String endDate, - String searchVal, String executorName, ExecutionStatus stateType, String host, - 
Integer pageNo, Integer pageSize) { - - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - int[] statusArray = null; - // filter by state - if (stateType != null) { - statusArray = new int[]{stateType.ordinal()}; - } - - Date start = null; - Date end = null; - try { - if (StringUtils.isNotEmpty(startDate)) { - start = DateUtils.getScheduleDate(startDate); - } - if (StringUtils.isNotEmpty(endDate)) { - end = DateUtils.getScheduleDate(endDate); - } - } catch (Exception e) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate"); - return result; - } - - Page page = new Page<>(pageNo, pageSize); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - int executorId = usersService.getUserIdByName(executorName); - - IPage processInstanceList = - processInstanceMapper.queryProcessInstanceListPaging(page, - project.getId(), processDefineId, searchVal, executorId, statusArray, host, start, end); - - List processInstances = processInstanceList.getRecords(); - - for (ProcessInstance processInstance : processInstances) { - processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime())); - User executor = usersService.queryUser(processInstance.getExecutorId()); - if (null != executor) { - processInstance.setExecutorName(executor.getUserName()); - } - } - - pageInfo.setTotalCount((int) processInstanceList.getTotal()); - pageInfo.setLists(processInstances); - result.put(DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - return result; - } + Map queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId, + String startDate, String endDate, + String searchVal, String executorName, ExecutionStatus stateType, String 
host, + Integer pageNo, Integer pageSize); /** * query task list by process instance id @@ -279,71 +77,9 @@ public class ProcessInstanceService extends BaseService { * @return task list for the process instance * @throws IOException io exception */ - public Map queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); - List taskInstanceList = processService.findValidTaskListByProcessId(processId); - addDependResultForTaskList(taskInstanceList); - Map resultMap = new HashMap<>(); - resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); - resultMap.put(TASK_LIST, taskInstanceList); - result.put(DATA_LIST, resultMap); - - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * add dependent result for dependent task - */ - private void addDependResultForTaskList(List taskInstanceList) throws IOException { - for (TaskInstance taskInstance : taskInstanceList) { - if (taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) { - Result logResult = loggerService.queryLog( - taskInstance.getId(), 0, 4098); - if (logResult.getCode() == Status.SUCCESS.ordinal()) { - String log = logResult.getData(); - Map resultMap = parseLogForDependentResult(log); - taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap)); - } - } - } - } + Map queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException; - public Map parseLogForDependentResult(String log) throws IOException { - Map resultMap = new HashMap<>(); - if (StringUtils.isEmpty(log)) { - 
return resultMap; - } - - BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes( - StandardCharsets.UTF_8)), StandardCharsets.UTF_8)); - String line; - while ((line = br.readLine()) != null) { - if (line.contains(DEPENDENT_SPLIT)) { - String[] tmpStringArray = line.split(":\\|\\|"); - if (tmpStringArray.length != 2) { - continue; - } - String dependResultString = tmpStringArray[1]; - String[] dependStringArray = dependResultString.split(","); - if (dependStringArray.length != 2) { - continue; - } - String key = dependStringArray[0].trim(); - DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim()); - resultMap.put(key, dependResult); - } - } - return resultMap; - } + Map parseLogForDependentResult(String log) throws IOException; /** * query sub process instance detail info by task id @@ -353,38 +89,7 @@ public class ProcessInstanceService extends BaseService { * @param taskId task id * @return sub process instance detail */ - public Map querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId) { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - TaskInstance taskInstance = processService.findTaskInstanceById(taskId); - if (taskInstance == null) { - putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); - return result; - } - if (!taskInstance.isSubProcess()) { - putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName()); - return result; - } - - ProcessInstance subWorkflowInstance = processService.findSubProcessInstance( - taskInstance.getProcessInstanceId(), taskInstance.getId()); - if (subWorkflowInstance == null) { - putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId); - 
return result; - } - Map dataMap = new HashMap<>(); - dataMap.put("subProcessInstanceId", subWorkflowInstance.getId()); - result.put(DATA_LIST, dataMap); - putMsg(result, Status.SUCCESS); - return result; - } + Map querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId); /** * update process instance @@ -401,97 +106,9 @@ public class ProcessInstanceService extends BaseService { * @return update result code * @throws ParseException parse exception for json parse */ - public Map updateProcessInstance(User loginUser, String projectName, Integer processInstanceId, - String processInstanceJson, String scheduleTime, Boolean syncDefine, - Flag flag, String locations, String connects) throws ParseException { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - //check project permission - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - //check process instance exists - ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); - if (processInstance == null) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); - return result; - } - - //check process instance status - if (!processInstance.getState().typeIsFinished()) { - putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, - processInstance.getName(), processInstance.getState().toString(), "update"); - return result; - } - Date schedule = null; - schedule = processInstance.getScheduleTime(); - if (scheduleTime != null) { - schedule = DateUtils.getScheduleDate(scheduleTime); - } - processInstance.setScheduleTime(schedule); - processInstance.setLocations(locations); - processInstance.setConnects(connects); - String globalParams = null; - String originDefParams = null; - int timeout = processInstance.getTimeout(); - 
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); - if (StringUtils.isNotEmpty(processInstanceJson)) { - ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class); - //check workflow json is valid - Map checkFlowJson = processDefinitionService.checkProcessNodeList(processData, processInstanceJson); - if (checkFlowJson.get(Constants.STATUS) != Status.SUCCESS) { - return result; - } - - originDefParams = JSONUtils.toJsonString(processData.getGlobalParams()); - List globalParamList = processData.getGlobalParams(); - Map globalParamMap = Optional.ofNullable(globalParamList).orElse(Collections.emptyList()).stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); - globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, - processInstance.getCmdTypeIfComplement(), schedule); - timeout = processData.getTimeout(); - processInstance.setTimeout(timeout); - Tenant tenant = processService.getTenantForProcess(processData.getTenantId(), - processDefinition.getUserId()); - if (tenant != null) { - processInstance.setTenantCode(tenant.getTenantCode()); - } - // get the processinstancejson before saving,and then save the name and taskid - String oldJson = processInstance.getProcessInstanceJson(); - if (StringUtils.isNotEmpty(oldJson)) { - processInstanceJson = processService.changeJson(processData,oldJson); - } - processInstance.setProcessInstanceJson(processInstanceJson); - processInstance.setGlobalParams(globalParams); - } - - int update = processService.updateProcessInstance(processInstance); - int updateDefine = 1; - if (Boolean.TRUE.equals(syncDefine)) { - processDefinition.setProcessDefinitionJson(processInstanceJson); - processDefinition.setGlobalParams(originDefParams); - processDefinition.setLocations(locations); - processDefinition.setConnects(connects); - processDefinition.setTimeout(timeout); - processDefinition.setUpdateTime(new 
Date()); - - // add process definition version - long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition); - processDefinition.setVersion(version); - updateDefine = processDefineMapper.updateById(processDefinition); - } - if (update > 0 && updateDefine > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR); - } - - return result; - - } + Map updateProcessInstance(User loginUser, String projectName, Integer processInstanceId, + String processInstanceJson, String scheduleTime, Boolean syncDefine, + Flag flag, String locations, String connects) throws ParseException; /** * query parent process instance detail info by sub process instance id @@ -501,37 +118,7 @@ public class ProcessInstanceService extends BaseService { * @param subId sub process id * @return parent instance detail */ - public Map queryParentInstanceBySubId(User loginUser, String projectName, Integer subId) { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId); - if (subInstance == null) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId); - return result; - } - if (subInstance.getIsSubProcess() == Flag.NO) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName()); - return result; - } - - ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId); - if (parentWorkflowInstance == null) { - putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST); - return result; - } - Map dataMap = new HashMap<>(); - dataMap.put("parentWorkflowInstance", parentWorkflowInstance.getId()); - result.put(DATA_LIST, dataMap); - 
putMsg(result, Status.SUCCESS); - return result; - } + Map queryParentInstanceBySubId(User loginUser, String projectName, Integer subId); /** * delete process instance by id, at the same time,delete task instance and their mapping relation data @@ -541,38 +128,7 @@ public class ProcessInstanceService extends BaseService { * @param processInstanceId process instance id * @return delete result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Map deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) { - - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); - if (null == processInstance) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); - return result; - } - - processService.removeTaskLogFile(processInstanceId); - // delete database cascade - int delete = processService.deleteWorkProcessInstanceById(processInstanceId); - - processService.deleteAllSubWorkProcessByParentId(processInstanceId); - processService.deleteWorkProcessMapByParentId(processInstanceId); - - if (delete > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR); - } - - return result; - } + Map deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId); /** * view process instance variables @@ -580,71 +136,7 @@ public class ProcessInstanceService extends BaseService { * @param processInstanceId process instance id * @return variables data */ - public Map viewVariables(Integer processInstanceId) { - Map result = new HashMap<>(); - - ProcessInstance processInstance = 
processInstanceMapper.queryDetailById(processInstanceId); - - if (processInstance == null) { - throw new RuntimeException("workflow instance is null"); - } - - Map timeParams = BusinessTimeUtils - .getBusinessTime(processInstance.getCmdTypeIfComplement(), - processInstance.getScheduleTime()); - - String workflowInstanceJson = processInstance.getProcessInstanceJson(); - - ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class); - - String userDefinedParams = processInstance.getGlobalParams(); - - // global params - List globalParams = new ArrayList<>(); - - if (userDefinedParams != null && userDefinedParams.length() > 0) { - globalParams = JSONUtils.toList(userDefinedParams, Property.class); - } - - List taskNodeList = workflowData.getTasks(); - - // global param string - String globalParamStr = JSONUtils.toJsonString(globalParams); - globalParamStr = ParameterUtils.convertParameterPlaceholders(globalParamStr, timeParams); - globalParams = JSONUtils.toList(globalParamStr, Property.class); - for (Property property : globalParams) { - timeParams.put(property.getProp(), property.getValue()); - } - - // local params - Map> localUserDefParams = new HashMap<>(); - for (TaskNode taskNode : taskNodeList) { - String parameter = taskNode.getParams(); - Map map = JSONUtils.toMap(parameter); - String localParams = map.get(LOCAL_PARAMS); - if (localParams != null && !localParams.isEmpty()) { - localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams); - List localParamsList = JSONUtils.toList(localParams, Property.class); - - Map localParamsMap = new HashMap<>(); - localParamsMap.put("taskType", taskNode.getType()); - localParamsMap.put("localParamsList", localParamsList); - if (CollectionUtils.isNotEmpty(localParamsList)) { - localUserDefParams.put(taskNode.getName(), localParamsMap); - } - } - - } - - Map resultMap = new HashMap<>(); - - resultMap.put(GLOBAL_PARAMS, globalParams); - resultMap.put(LOCAL_PARAMS, 
localUserDefParams); - - result.put(DATA_LIST, resultMap); - putMsg(result, Status.SUCCESS); - return result; - } + Map viewVariables(Integer processInstanceId); /** * encapsulation gantt structure @@ -653,67 +145,7 @@ public class ProcessInstanceService extends BaseService { * @return gantt tree data * @throws Exception exception when json parse */ - public Map viewGantt(Integer processInstanceId) throws Exception { - Map result = new HashMap<>(); - - ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); - - if (processInstance == null) { - throw new RuntimeException("workflow instance is null"); - } - - GanttDto ganttDto = new GanttDto(); - - DAG dag = processInstance2DAG(processInstance); - //topological sort - List nodeList = dag.topologicalSort(); - - ganttDto.setTaskNames(nodeList); - - List taskList = new ArrayList<>(); - for (String node : nodeList) { - TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstanceId, node); - if (taskInstance == null) { - continue; - } - Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime(); - Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime(); - Task task = new Task(); - task.setTaskName(taskInstance.getName()); - task.getStartDate().add(startTime.getTime()); - task.getEndDate().add(endTime.getTime()); - task.setIsoStart(startTime); - task.setIsoEnd(endTime); - task.setStatus(taskInstance.getState().toString()); - task.setExecutionDate(taskInstance.getStartTime()); - task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime())); - taskList.add(task); - } - ganttDto.setTasks(taskList); - - result.put(DATA_LIST, ganttDto); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * process instance to DAG - * - * @param processInstance input process instance - * @return process instance dag. 
- */ - private static DAG processInstance2DAG(ProcessInstance processInstance) { - - String processDefinitionJson = processInstance.getProcessInstanceJson(); - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - - List taskNodeList = processData.getTasks(); - - ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); - - return DagHelper.buildDagGraph(processDag); - } + Map viewGantt(Integer processInstanceId) throws Exception; /** * query process instance by processDefinitionId and stateArray @@ -721,9 +153,7 @@ public class ProcessInstanceService extends BaseService { * @param states states array * @return process instance list */ - public List queryByProcessDefineIdAndStatus(int processDefinitionId, int[] states) { - return processInstanceMapper.queryByProcessDefineIdAndStatus(processDefinitionId, states); - } + List queryByProcessDefineIdAndStatus(int processDefinitionId, int[] states); /** * query process instance by processDefinitionId @@ -731,8 +161,6 @@ public class ProcessInstanceService extends BaseService { * @param size size * @return process instance list */ - public List queryByProcessDefineId(int processDefinitionId,int size) { - return processInstanceMapper.queryByProcessDefineId(processDefinitionId, size); - } + List queryByProcessDefineId(int processDefinitionId,int size); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java index 4753725d31..205393f527 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.dao.entity.Project; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java index 23de453e11..24f6189afc 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java @@ -14,43 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.dao.entity.Queue; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.QueueMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang.StringUtils; -import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import java.util.Date; -import java.util.HashMap; -import java.util.List; import java.util.Map; /** * queue service */ -@Service -public class QueueService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(QueueService.class); - - @Autowired - private QueueMapper queueMapper; - - @Autowired - private UserMapper userMapper; +public interface QueueService { /** * query queue list @@ -58,18 
+33,7 @@ public class QueueService extends BaseService { * @param loginUser login user * @return queue list */ - public Map queryList(User loginUser) { - Map result = new HashMap<>(); - if (isNotAdmin(loginUser, result)) { - return result; - } - - List queueList = queueMapper.selectList(null); - result.put(Constants.DATA_LIST, queueList); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryList(User loginUser); /** * query queue list paging @@ -80,26 +44,7 @@ public class QueueService extends BaseService { * @param pageSize page size * @return queue list */ - public Map queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(); - if (isNotAdmin(loginUser, result)) { - return result; - } - - Page page = new Page(pageNo, pageSize); - - - IPage queueList = queueMapper.queryQueuePaging(page, searchVal); - - Integer count = (int) queueList.getTotal(); - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotalCount(count); - pageInfo.setLists(queueList.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize); /** * create queue @@ -109,45 +54,7 @@ public class QueueService extends BaseService { * @param queueName queue name * @return create result */ - public Map createQueue(User loginUser, String queue, String queueName) { - Map result = new HashMap<>(); - if (isNotAdmin(loginUser, result)) { - return result; - } - - if (StringUtils.isEmpty(queue)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queue"); - return result; - } - - if (StringUtils.isEmpty(queueName)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queueName"); - return result; - } - - if (checkQueueNameExist(queueName)) { - putMsg(result, Status.QUEUE_NAME_EXIST, queueName); - return result; - } - - if (checkQueueExist(queue)) { - putMsg(result, 
Status.QUEUE_VALUE_EXIST, queue); - return result; - } - - Queue queueObj = new Queue(); - Date now = new Date(); - - queueObj.setQueue(queue); - queueObj.setQueueName(queueName); - queueObj.setCreateTime(now); - queueObj.setUpdateTime(now); - - queueMapper.insert(queueObj); - putMsg(result, Status.SUCCESS); - - return result; - } + Map createQueue(User loginUser, String queue, String queueName); /** * update queue @@ -158,66 +65,7 @@ public class QueueService extends BaseService { * @param queueName queue name * @return update result code */ - public Map updateQueue(User loginUser, int id, String queue, String queueName) { - Map result = new HashMap<>(); - if (isNotAdmin(loginUser, result)) { - return result; - } - - if (StringUtils.isEmpty(queue)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queue"); - return result; - } - - if (StringUtils.isEmpty(queueName)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queueName"); - return result; - } - - Queue queueObj = queueMapper.selectById(id); - if (queueObj == null) { - putMsg(result, Status.QUEUE_NOT_EXIST, id); - return result; - } - - // whether queue value or queueName is changed - if (queue.equals(queueObj.getQueue()) && queueName.equals(queueObj.getQueueName())) { - putMsg(result, Status.NEED_NOT_UPDATE_QUEUE); - return result; - } - - // check queue name is exist - if (!queueName.equals(queueObj.getQueueName()) - && checkQueueNameExist(queueName)) { - putMsg(result, Status.QUEUE_NAME_EXIST, queueName); - return result; - } - - // check queue value is exist - if (!queue.equals(queueObj.getQueue()) && checkQueueExist(queue)) { - putMsg(result, Status.QUEUE_VALUE_EXIST, queue); - return result; - } - - // check old queue using by any user - if (checkIfQueueIsInUsing(queueObj.getQueueName(), queueName)) { - //update user related old queue - Integer relatedUserNums = userMapper.updateUserQueue(queueObj.getQueueName(), queueName); - logger.info("old queue have related {} user, exec update 
user success.", relatedUserNums); - } - - // update queue - Date now = new Date(); - queueObj.setQueue(queue); - queueObj.setQueueName(queueName); - queueObj.setUpdateTime(now); - - queueMapper.updateById(queueObj); - - putMsg(result, Status.SUCCESS); - - return result; - } + Map updateQueue(User loginUser, int id, String queue, String queueName); /** * verify queue and queueName @@ -226,69 +74,6 @@ public class QueueService extends BaseService { * @param queueName queue name * @return true if the queue name not exists, otherwise return false */ - public Result verifyQueue(String queue, String queueName) { - Result result = new Result(); - - if (StringUtils.isEmpty(queue)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queue"); - return result; - } - - if (StringUtils.isEmpty(queueName)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queueName"); - return result; - } - - - if (checkQueueNameExist(queueName)) { - logger.error("queue name {} has exist, can't create again.", queueName); - putMsg(result, Status.QUEUE_NAME_EXIST, queueName); - return result; - } - - if (checkQueueExist(queue)) { - logger.error("queue value {} has exist, can't create again.", queue); - putMsg(result, Status.QUEUE_VALUE_EXIST, queue); - return result; - } - - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * check queue exist - * if exists return true,not exists return false - * check queue exist - * - * @param queue queue - * @return true if the queue not exists, otherwise return false - */ - private boolean checkQueueExist(String queue) { - return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(queue, null)); - } - - /** - * check queue name exist - * if exists return true,not exists return false - * - * @param queueName queue name - * @return true if the queue name not exists, otherwise return false - */ - private boolean checkQueueNameExist(String queueName) { - return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(null, 
queueName)); - } - - /** - * check old queue name using by any user - * if need to update user - * - * @param oldQueue old queue name - * @param newQueue new queue name - * @return true if need to update user - */ - private boolean checkIfQueueIsInUsing (String oldQueue, String newQueue) { - return !oldQueue.equals(newQueue) && CollectionUtils.isNotEmpty(userMapper.queryUserListByQueue(oldQueue)); - } + Result verifyQueue(String queue, String queueName); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java index e7d8906f28..bb778dd4eb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java @@ -14,68 +14,23 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import com.fasterxml.jackson.databind.SerializationFeature; -import org.apache.commons.collections.BeanMap; -import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; -import org.apache.dolphinscheduler.api.dto.resources.filter.ResourceFilter; -import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor; -import org.apache.dolphinscheduler.api.dto.resources.visitor.Visitor; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.exceptions.ServiceException; -import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.enums.ResourceType; -import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.dao.entity.*; -import org.apache.dolphinscheduler.dao.mapper.*; -import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.dao.DuplicateKeyException; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.web.multipart.MultipartFile; +import org.apache.dolphinscheduler.dao.entity.User; import java.io.IOException; -import java.text.MessageFormat; -import java.util.*; -import java.util.regex.Matcher; -import java.util.stream.Collectors; +import java.util.Map; -import static org.apache.dolphinscheduler.common.Constants.*; +import org.springframework.web.multipart.MultipartFile; /** * resources service */ -@Service -public class ResourcesService 
extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(ResourcesService.class); - - @Autowired - private ResourceMapper resourcesMapper; - - @Autowired - private UdfFuncMapper udfFunctionMapper; - - @Autowired - private TenantMapper tenantMapper; - - @Autowired - private UserMapper userMapper; - - @Autowired - private ResourceUserMapper resourceUserMapper; - - @Autowired - private ProcessDefinitionMapper processDefinitionMapper; +public interface ResourcesService { /** * create directory @@ -88,74 +43,12 @@ public class ResourcesService extends BaseService { * @param currentDir current directory * @return create directory result */ - @Transactional(rollbackFor = Exception.class) - public Result createDirectory(User loginUser, - String name, - String description, - ResourceType type, - int pid, - String currentDir) { - Result result = new Result(); - // if hdfs not startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - String fullName = currentDir.equals("/") ? 
String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name); - result = verifyResourceName(fullName,type,loginUser); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - return result; - } - if (pid != -1) { - Resource parentResource = resourcesMapper.selectById(pid); - - if (parentResource == null) { - putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST); - return result; - } - - if (!hasPerm(loginUser, parentResource.getUserId())) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - } - - - if (checkResourceExists(fullName, 0, type.ordinal())) { - logger.error("resource directory {} has exist, can't recreate", fullName); - putMsg(result, Status.RESOURCE_EXIST); - return result; - } - - Date now = new Date(); - - Resource resource = new Resource(pid,name,fullName,true,description,name,loginUser.getId(),type,0,now,now); - - try { - resourcesMapper.insert(resource); - - putMsg(result, Status.SUCCESS); - Map dataMap = new BeanMap(resource); - Map resultMap = new HashMap(); - for (Map.Entry entry: dataMap.entrySet()) { - if (!"class".equalsIgnoreCase(entry.getKey().toString())) { - resultMap.put(entry.getKey().toString(), entry.getValue()); - } - } - result.setData(resultMap); - } catch (DuplicateKeyException e) { - logger.error("resource directory {} has exist, can't recreate", fullName); - putMsg(result, Status.RESOURCE_EXIST); - return result; - } catch (Exception e) { - logger.error("resource already exists, can't recreate ", e); - throw new RuntimeException("resource already exists, can't recreate"); - } - //create directory in hdfs - createDirecotry(loginUser,fullName,type,result); - return result; - } + Result createDirectory(User loginUser, + String name, + String description, + ResourceType type, + int pid, + String currentDir); /** * create resource @@ -169,121 +62,13 @@ public class ResourcesService extends BaseService { * @param currentDir current directory * @return create result code */ - 
@Transactional(rollbackFor = Exception.class) - public Result createResource(User loginUser, - String name, - String desc, - ResourceType type, - MultipartFile file, - int pid, - String currentDir) { - Result result = new Result(); - - // if hdfs not startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - if (pid != -1) { - Resource parentResource = resourcesMapper.selectById(pid); - - if (parentResource == null) { - putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST); - return result; - } - - if (!hasPerm(loginUser, parentResource.getUserId())) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - } - - // file is empty - if (file.isEmpty()) { - logger.error("file is empty: {}", file.getOriginalFilename()); - putMsg(result, Status.RESOURCE_FILE_IS_EMPTY); - return result; - } - - // file suffix - String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); - String nameSuffix = FileUtils.suffix(name); - - // determine file suffix - if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) { - /** - * rename file suffix and original suffix must be consistent - */ - logger.error("rename file suffix and original suffix must be consistent: {}", file.getOriginalFilename()); - putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE); - return result; - } - - //If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar - if (Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(fileSuffix)) { - logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg()); - putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR); - return result; - } - if (file.getSize() > Constants.MAX_FILE_SIZE) { - logger.error("file size is too large: {}", file.getOriginalFilename()); - putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT); - return result; - } 
- - // check resoure name exists - String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name); - if (checkResourceExists(fullName, 0, type.ordinal())) { - logger.error("resource {} has exist, can't recreate", name); - putMsg(result, Status.RESOURCE_EXIST); - return result; - } - - Date now = new Date(); - Resource resource = new Resource(pid,name,fullName,false,desc,file.getOriginalFilename(),loginUser.getId(),type,file.getSize(),now,now); - - try { - resourcesMapper.insert(resource); - - putMsg(result, Status.SUCCESS); - Map dataMap = new BeanMap(resource); - Map resultMap = new HashMap<>(); - for (Map.Entry entry: dataMap.entrySet()) { - if (!"class".equalsIgnoreCase(entry.getKey().toString())) { - resultMap.put(entry.getKey().toString(), entry.getValue()); - } - } - result.setData(resultMap); - } catch (Exception e) { - logger.error("resource already exists, can't recreate ", e); - throw new RuntimeException("resource already exists, can't recreate"); - } - - // fail upload - if (!upload(loginUser, fullName, file, type)) { - logger.error("upload resource: {} file: {} failed.", name, file.getOriginalFilename()); - putMsg(result, Status.HDFS_OPERATION_ERROR); - throw new RuntimeException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); - } - return result; - } - - /** - * check resource is exists - * - * @param fullName fullName - * @param userId user id - * @param type type - * @return true if resource exists - */ - private boolean checkResourceExists(String fullName, int userId, int type ){ - - List resources = resourcesMapper.queryResourceList(fullName, userId, type); - return resources != null && resources.size() > 0; - } - + Result createResource(User loginUser, + String name, + String desc, + ResourceType type, + MultipartFile file, + int pid, + String currentDir); /** * update resource @@ -295,239 +80,12 @@ public class ResourcesService extends BaseService 
{ * @param file resource file * @return update result code */ - @Transactional(rollbackFor = Exception.class) - public Result updateResource(User loginUser, - int resourceId, - String name, - String desc, - ResourceType type, - MultipartFile file) { - Result result = new Result(); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - Resource resource = resourcesMapper.selectById(resourceId); - if (resource == null) { - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - if (!hasPerm(loginUser, resource.getUserId())) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - if (file == null && name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) { - putMsg(result, Status.SUCCESS); - return result; - } - - //check resource aleady exists - String originFullName = resource.getFullName(); - String originResourceName = resource.getAlias(); - - String fullName = String.format("%s%s",originFullName.substring(0,originFullName.lastIndexOf("/")+1),name); - if (!originResourceName.equals(name) && checkResourceExists(fullName, 0, type.ordinal())) { - logger.error("resource {} already exists, can't recreate", name); - putMsg(result, Status.RESOURCE_EXIST); - return result; - } - - if (file != null) { - - // file is empty - if (file.isEmpty()) { - logger.error("file is empty: {}", file.getOriginalFilename()); - putMsg(result, Status.RESOURCE_FILE_IS_EMPTY); - return result; - } - - // file suffix - String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); - String nameSuffix = FileUtils.suffix(name); - - // determine file suffix - if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) { - /** - * rename file suffix and original suffix must be consistent - */ - logger.error("rename file suffix and 
original suffix must be consistent: {}", file.getOriginalFilename()); - putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE); - return result; - } - - //If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar - if (Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(FileUtils.suffix(originFullName))) { - logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg()); - putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR); - return result; - } - if (file.getSize() > Constants.MAX_FILE_SIZE) { - logger.error("file size is too large: {}", file.getOriginalFilename()); - putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT); - return result; - } - } - - // query tenant by user id - String tenantCode = getTenantCode(resource.getUserId(),result); - if (StringUtils.isEmpty(tenantCode)){ - return result; - } - // verify whether the resource exists in storage - // get the path of origin file in storage - String originHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,originFullName); - try { - if (!HadoopUtils.getInstance().exists(originHdfsFileName)) { - logger.error("{} not exist", originHdfsFileName); - putMsg(result,Status.RESOURCE_NOT_EXIST); - return result; - } - } catch (IOException e) { - logger.error(e.getMessage(),e); - throw new ServiceException(Status.HDFS_OPERATION_ERROR); - } - - if (!resource.isDirectory()) { - //get the origin file suffix - String originSuffix = FileUtils.suffix(originFullName); - String suffix = FileUtils.suffix(fullName); - boolean suffixIsChanged = false; - if (StringUtils.isBlank(suffix) && StringUtils.isNotBlank(originSuffix)) { - suffixIsChanged = true; - } - if (StringUtils.isNotBlank(suffix) && !suffix.equals(originSuffix)) { - suffixIsChanged = true; - } - //verify whether suffix is changed - if (suffixIsChanged) { - //need verify whether this resource is authorized to other users - Map columnMap = new HashMap<>(); - columnMap.put("resources_id", resourceId); - - List 
resourcesUsers = resourceUserMapper.selectByMap(columnMap); - if (CollectionUtils.isNotEmpty(resourcesUsers)) { - List userIds = resourcesUsers.stream().map(ResourcesUser::getUserId).collect(Collectors.toList()); - List users = userMapper.selectBatchIds(userIds); - String userNames = users.stream().map(User::getUserName).collect(Collectors.toList()).toString(); - logger.error("resource is authorized to user {},suffix not allowed to be modified", userNames); - putMsg(result,Status.RESOURCE_IS_AUTHORIZED,userNames); - return result; - } - } - } - - // updateResource data - Date now = new Date(); - - resource.setAlias(name); - resource.setFullName(fullName); - resource.setDescription(desc); - resource.setUpdateTime(now); - if (file != null) { - resource.setFileName(file.getOriginalFilename()); - resource.setSize(file.getSize()); - } - - try { - resourcesMapper.updateById(resource); - if (resource.isDirectory()) { - List childrenResource = listAllChildren(resource,false); - if (CollectionUtils.isNotEmpty(childrenResource)) { - String matcherFullName = Matcher.quoteReplacement(fullName); - List childResourceList = new ArrayList<>(); - Integer[] childResIdArray = childrenResource.toArray(new Integer[childrenResource.size()]); - List resourceList = resourcesMapper.listResourceByIds(childResIdArray); - childResourceList = resourceList.stream().map(t -> { - t.setFullName(t.getFullName().replaceFirst(originFullName, matcherFullName)); - t.setUpdateTime(now); - return t; - }).collect(Collectors.toList()); - resourcesMapper.batchUpdateResource(childResourceList); - - if (ResourceType.UDF.equals(resource.getType())) { - List udfFuncs = udfFunctionMapper.listUdfByResourceId(childResIdArray); - if (CollectionUtils.isNotEmpty(udfFuncs)) { - udfFuncs = udfFuncs.stream().map(t -> { - t.setResourceName(t.getResourceName().replaceFirst(originFullName, matcherFullName)); - t.setUpdateTime(now); - return t; - }).collect(Collectors.toList()); - 
udfFunctionMapper.batchUpdateUdfFunc(udfFuncs); - } - } - } - } else if (ResourceType.UDF.equals(resource.getType())) { - List udfFuncs = udfFunctionMapper.listUdfByResourceId(new Integer[]{resourceId}); - if (CollectionUtils.isNotEmpty(udfFuncs)) { - udfFuncs = udfFuncs.stream().map(t -> { - t.setResourceName(fullName); - t.setUpdateTime(now); - return t; - }).collect(Collectors.toList()); - udfFunctionMapper.batchUpdateUdfFunc(udfFuncs); - } - - } - - putMsg(result, Status.SUCCESS); - Map dataMap = new BeanMap(resource); - Map resultMap = new HashMap<>(5); - for (Map.Entry entry: dataMap.entrySet()) { - if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) { - resultMap.put(entry.getKey().toString(), entry.getValue()); - } - } - result.setData(resultMap); - } catch (Exception e) { - logger.error(Status.UPDATE_RESOURCE_ERROR.getMsg(), e); - throw new ServiceException(Status.UPDATE_RESOURCE_ERROR); - } - - // if name unchanged, return directly without moving on HDFS - if (originResourceName.equals(name) && file == null) { - return result; - } - - if (file != null) { - // fail upload - if (!upload(loginUser, fullName, file, type)) { - logger.error("upload resource: {} file: {} failed.", name, file.getOriginalFilename()); - putMsg(result, Status.HDFS_OPERATION_ERROR); - throw new RuntimeException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); - } - if (!fullName.equals(originFullName)) { - try { - HadoopUtils.getInstance().delete(originHdfsFileName,false); - } catch (IOException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(String.format("delete resource: %s failed.", originFullName)); - } - } - return result; - } - - - // get the path of dest file in hdfs - String destHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,fullName); - - - try { - logger.info("start hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName); - 
HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true); - } catch (Exception e) { - logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e); - putMsg(result,Status.HDFS_COPY_FAIL); - throw new ServiceException(Status.HDFS_COPY_FAIL); - } - - return result; - - } + Result updateResource(User loginUser, + int resourceId, + String name, + String desc, + ResourceType type, + MultipartFile file); /** * query resources list paging @@ -539,99 +97,7 @@ public class ResourcesService extends BaseService { * @param pageSize page size * @return resource list page */ - public Map queryResourceListPaging(User loginUser, int direcotryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) { - - HashMap result = new HashMap<>(5); - Page page = new Page(pageNo, pageSize); - int userId = loginUser.getId(); - if (isAdmin(loginUser)) { - userId= 0; - } - if (direcotryId != -1) { - Resource directory = resourcesMapper.selectById(direcotryId); - if (directory == null) { - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - } - - IPage resourceIPage = resourcesMapper.queryResourcePaging(page, - userId,direcotryId, type.ordinal(), searchVal); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - pageInfo.setTotalCount((int)resourceIPage.getTotal()); - pageInfo.setLists(resourceIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result,Status.SUCCESS); - return result; - } - - /** - * create direcoty - * @param loginUser login user - * @param fullName full name - * @param type resource type - * @param result Result - */ - private void createDirecotry(User loginUser,String fullName,ResourceType type,Result result){ - // query tenant - String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); - String directoryName = HadoopUtils.getHdfsFileName(type,tenantCode,fullName); - String resourceRootPath = 
HadoopUtils.getHdfsDir(type,tenantCode); - try { - if (!HadoopUtils.getInstance().exists(resourceRootPath)) { - createTenantDirIfNotExists(tenantCode); - } - - if (!HadoopUtils.getInstance().mkdir(directoryName)) { - logger.error("create resource directory {} of hdfs failed",directoryName); - putMsg(result,Status.HDFS_OPERATION_ERROR); - throw new RuntimeException(String.format("create resource directory: %s failed.", directoryName)); - } - } catch (Exception e) { - logger.error("create resource directory {} of hdfs failed",directoryName); - putMsg(result,Status.HDFS_OPERATION_ERROR); - throw new RuntimeException(String.format("create resource directory: %s failed.", directoryName)); - } - } - - /** - * upload file to hdfs - * - * @param loginUser login user - * @param fullName full name - * @param file file - */ - private boolean upload(User loginUser, String fullName, MultipartFile file, ResourceType type) { - // save to local - String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); - String nameSuffix = FileUtils.suffix(fullName); - - // determine file suffix - if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) { - return false; - } - // query tenant - String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); - // random file name - String localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); - - - // save file to hdfs, and delete original file - String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName); - String resourcePath = HadoopUtils.getHdfsDir(type,tenantCode); - try { - // if tenant dir not exists - if (!HadoopUtils.getInstance().exists(resourcePath)) { - createTenantDirIfNotExists(tenantCode); - } - org.apache.dolphinscheduler.api.utils.FileUtils.copyFile(file, localFilename); - HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true); - } catch (Exception e) { - logger.error(e.getMessage(), e); - return 
false; - } - return true; - } + Map queryResourceListPaging(User loginUser, int directoryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize); /** * query resource list @@ -640,21 +106,7 @@ public class ResourcesService extends BaseService { * @param type resource type * @return resource list */ - public Map queryResourceList(User loginUser, ResourceType type) { - - Map result = new HashMap<>(5); - - int userId = loginUser.getId(); - if(isAdmin(loginUser)){ - userId = 0; - } - List allResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal(),0); - Visitor resourceTreeVisitor = new ResourceTreeVisitor(allResourceList); - result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren()); - putMsg(result,Status.SUCCESS); - - return result; - } + Map queryResourceList(User loginUser, ResourceType type); /** * query resource list by program type @@ -663,33 +115,7 @@ public class ResourcesService extends BaseService { * @param type resource type * @return resource list */ - public Map queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType) { - - Map result = new HashMap<>(5); - String suffix = ".jar"; - int userId = loginUser.getId(); - if(isAdmin(loginUser)){ - userId = 0; - } - if (programType != null) { - switch (programType) { - case JAVA: - break; - case SCALA: - break; - case PYTHON: - suffix = ".py"; - break; - } - } - List allResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal(),0); - List resources = new ResourceFilter(suffix,new ArrayList<>(allResourceList)).filter(); - Visitor resourceTreeVisitor = new ResourceTreeVisitor(resources); - result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren()); - putMsg(result,Status.SUCCESS); - - return result; - } + Map queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType); /** * delete resource @@ -697,82 +123,9 @@ public class ResourcesService extends BaseService { * 
@param loginUser login user * @param resourceId resource id * @return delete result code - * @throws Exception exception + * @throws IOException exception */ - @Transactional(rollbackFor = Exception.class) - public Result delete(User loginUser, int resourceId) throws Exception { - Result result = new Result(); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - //get resource and hdfs path - Resource resource = resourcesMapper.selectById(resourceId); - if (resource == null) { - logger.error("resource file not exist, resource id {}", resourceId); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - if (!hasPerm(loginUser, resource.getUserId())) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - String tenantCode = getTenantCode(resource.getUserId(),result); - if (StringUtils.isEmpty(tenantCode)){ - return result; - } - - // get all resource id of process definitions those is released - List> list = processDefinitionMapper.listResources(); - Map> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list); - Set resourceIdSet = resourceProcessMap.keySet(); - // get all children of the resource - List allChildren = listAllChildren(resource,true); - Integer[] needDeleteResourceIdArray = allChildren.toArray(new Integer[allChildren.size()]); - - //if resource type is UDF,need check whether it is bound by UDF functon - if (resource.getType() == (ResourceType.UDF)) { - List udfFuncs = udfFunctionMapper.listUdfByResourceId(needDeleteResourceIdArray); - if (CollectionUtils.isNotEmpty(udfFuncs)) { - logger.error("can't be deleted,because it is bound by UDF functions:{}",udfFuncs.toString()); - putMsg(result,Status.UDF_RESOURCE_IS_BOUND,udfFuncs.get(0).getFuncName()); - return result; - } - } - - if 
(resourceIdSet.contains(resource.getPid())) { - logger.error("can't be deleted,because it is used of process definition"); - putMsg(result, Status.RESOURCE_IS_USED); - return result; - } - resourceIdSet.retainAll(allChildren); - if (CollectionUtils.isNotEmpty(resourceIdSet)) { - logger.error("can't be deleted,because it is used of process definition"); - for (Integer resId : resourceIdSet) { - logger.error("resource id:{} is used of process definition {}",resId,resourceProcessMap.get(resId)); - } - putMsg(result, Status.RESOURCE_IS_USED); - return result; - } - - // get hdfs file by type - String hdfsFilename = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName()); - - //delete data in database - resourcesMapper.deleteIds(needDeleteResourceIdArray); - resourceUserMapper.deleteResourceUserArray(0, needDeleteResourceIdArray); - - //delete file on hdfs - HadoopUtils.getInstance().delete(hdfsFilename, true); - putMsg(result, Status.SUCCESS); - - return result; - } + Result delete(User loginUser, int resourceId) throws IOException; /** * verify resource by name and type @@ -781,37 +134,7 @@ public class ResourcesService extends BaseService { * @param type resource type * @return true if the resource name not exists, otherwise return false */ - public Result verifyResourceName(String fullName, ResourceType type,User loginUser) { - Result result = new Result(); - putMsg(result, Status.SUCCESS); - if (checkResourceExists(fullName, 0, type.ordinal())) { - logger.error("resource type:{} name:{} has exist, can't create again.", type, fullName); - putMsg(result, Status.RESOURCE_EXIST); - } else { - // query tenant - Tenant tenant = tenantMapper.queryById(loginUser.getTenantId()); - if(tenant != null){ - String tenantCode = tenant.getTenantCode(); - - try { - String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName); - if(HadoopUtils.getInstance().exists(hdfsFilename)){ - logger.error("resource type:{} name:{} has exist in hdfs 
{}, can't create again.", type, fullName,hdfsFilename); - putMsg(result, Status.RESOURCE_FILE_EXIST,hdfsFilename); - } - - } catch (Exception e) { - logger.error(e.getMessage(),e); - putMsg(result,Status.HDFS_OPERATION_ERROR); - } - }else{ - putMsg(result,Status.TENANT_NOT_EXIST); - } - } - - - return result; - } + Result verifyResourceName(String fullName, ResourceType type,User loginUser); /** * verify resource by full name or pid and type @@ -820,40 +143,7 @@ public class ResourcesService extends BaseService { * @param type resource type * @return true if the resource full name or pid not exists, otherwise return false */ - public Result queryResource(String fullName,Integer id,ResourceType type) { - Result result = new Result(); - if (StringUtils.isBlank(fullName) && id == null) { - logger.error("You must input one of fullName and pid"); - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - return result; - } - if (StringUtils.isNotBlank(fullName)) { - List resourceList = resourcesMapper.queryResource(fullName,type.ordinal()); - if (CollectionUtils.isEmpty(resourceList)) { - logger.error("resource file not exist, resource full name {} ", fullName); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - putMsg(result, Status.SUCCESS); - result.setData(resourceList.get(0)); - } else { - Resource resource = resourcesMapper.selectById(id); - if (resource == null) { - logger.error("resource file not exist, resource id {}", id); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - Resource parentResource = resourcesMapper.selectById(resource.getPid()); - if (parentResource == null) { - logger.error("parent resource file not exist, resource id {}", id); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - putMsg(result, Status.SUCCESS); - result.setData(parentResource); - } - return result; - } + Result queryResource(String fullName,Integer id,ResourceType type); /** * view resource file online @@ -863,64 +153,7 @@ 
public class ResourcesService extends BaseService { * @param limit limit * @return resource content */ - public Result readResource(int resourceId, int skipLineNum, int limit) { - Result result = new Result(); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - // get resource by id - Resource resource = resourcesMapper.selectById(resourceId); - if (resource == null) { - logger.error("resource file not exist, resource id {}", resourceId); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - //check preview or not by file suffix - String nameSuffix = FileUtils.suffix(resource.getAlias()); - String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); - if (StringUtils.isNotEmpty(resourceViewSuffixs)) { - List strList = Arrays.asList(resourceViewSuffixs.split(",")); - if (!strList.contains(nameSuffix)) { - logger.error("resource suffix {} not support view, resource id {}", nameSuffix, resourceId); - putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); - return result; - } - } - - String tenantCode = getTenantCode(resource.getUserId(),result); - if (StringUtils.isEmpty(tenantCode)){ - return result; - } - - // hdfs path - String hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resource.getFullName()); - logger.info("resource hdfs path is {} ", hdfsFileName); - try { - if(HadoopUtils.getInstance().exists(hdfsFileName)){ - List content = HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit); - - putMsg(result, Status.SUCCESS); - Map map = new HashMap<>(); - map.put(ALIAS, resource.getAlias()); - map.put(CONTENT, String.join("\n", content)); - result.setData(map); - }else{ - logger.error("read file {} not exist in hdfs", hdfsFileName); - putMsg(result, Status.RESOURCE_FILE_NOT_EXIST,hdfsFileName); - } - - } catch (Exception e) { 
- logger.error("Resource {} read failed", hdfsFileName, e); - putMsg(result, Status.HDFS_OPERATION_ERROR); - } - - return result; - } + Result readResource(int resourceId, int skipLineNum, int limit); /** * create resource file online @@ -933,73 +166,7 @@ public class ResourcesService extends BaseService { * @param content content * @return create result code */ - @Transactional(rollbackFor = Exception.class) - public Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content,int pid,String currentDirectory) { - Result result = new Result(); - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - //check file suffix - String nameSuffix = fileSuffix.trim(); - String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); - if (StringUtils.isNotEmpty(resourceViewSuffixs)) { - List strList = Arrays.asList(resourceViewSuffixs.split(",")); - if (!strList.contains(nameSuffix)) { - logger.error("resouce suffix {} not support create", nameSuffix); - putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); - return result; - } - } - - String name = fileName.trim() + "." + nameSuffix; - String fullName = currentDirectory.equals("/") ? 
String.format("%s%s",currentDirectory,name):String.format("%s/%s",currentDirectory,name); - - result = verifyResourceName(fullName,type,loginUser); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - return result; - } - if (pid != -1) { - Resource parentResource = resourcesMapper.selectById(pid); - - if (parentResource == null) { - putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST); - return result; - } - - if (!hasPerm(loginUser, parentResource.getUserId())) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - } - - // save data - Date now = new Date(); - Resource resource = new Resource(pid,name,fullName,false,desc,name,loginUser.getId(),type,content.getBytes().length,now,now); - - resourcesMapper.insert(resource); - - putMsg(result, Status.SUCCESS); - Map dataMap = new BeanMap(resource); - Map resultMap = new HashMap<>(); - for (Map.Entry entry: dataMap.entrySet()) { - if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) { - resultMap.put(entry.getKey().toString(), entry.getValue()); - } - } - result.setData(resultMap); - - String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); - - result = uploadContentToHdfs(fullName, tenantCode, content); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - throw new RuntimeException(result.getMsg()); - } - return result; - } + Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content,int pid,String currentDirectory); /** * updateProcessInstance resource @@ -1008,145 +175,16 @@ public class ResourcesService extends BaseService { * @param content content * @return update result cod */ - @Transactional(rollbackFor = Exception.class) - public Result updateResourceContent(int resourceId, String content) { - Result result = new Result(); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", 
PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - Resource resource = resourcesMapper.selectById(resourceId); - if (resource == null) { - logger.error("read file not exist, resource id {}", resourceId); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - //check can edit by file suffix - String nameSuffix = FileUtils.suffix(resource.getAlias()); - String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); - if (StringUtils.isNotEmpty(resourceViewSuffixs)) { - List strList = Arrays.asList(resourceViewSuffixs.split(",")); - if (!strList.contains(nameSuffix)) { - logger.error("resource suffix {} not support updateProcessInstance, resource id {}", nameSuffix, resourceId); - putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); - return result; - } - } - - String tenantCode = getTenantCode(resource.getUserId(),result); - if (StringUtils.isEmpty(tenantCode)){ - return result; - } - resource.setSize(content.getBytes().length); - resource.setUpdateTime(new Date()); - resourcesMapper.updateById(resource); - - - result = uploadContentToHdfs(resource.getFullName(), tenantCode, content); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - throw new RuntimeException(result.getMsg()); - } - return result; - } - - /** - * @param resourceName resource name - * @param tenantCode tenant code - * @param content content - * @return result - */ - private Result uploadContentToHdfs(String resourceName, String tenantCode, String content) { - Result result = new Result(); - String localFilename = ""; - String hdfsFileName = ""; - try { - localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); - - if (!FileUtils.writeContent2File(content, localFilename)) { - // write file fail - logger.error("file {} fail, content is {}", localFilename, content); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - // get resource file hdfs path - hdfsFileName = 
HadoopUtils.getHdfsResourceFileName(tenantCode, resourceName); - String resourcePath = HadoopUtils.getHdfsResDir(tenantCode); - logger.info("resource hdfs path is {} ", hdfsFileName); - - HadoopUtils hadoopUtils = HadoopUtils.getInstance(); - if (!hadoopUtils.exists(resourcePath)) { - // create if tenant dir not exists - createTenantDirIfNotExists(tenantCode); - } - if (hadoopUtils.exists(hdfsFileName)) { - hadoopUtils.delete(hdfsFileName, false); - } - - hadoopUtils.copyLocalToHdfs(localFilename, hdfsFileName, true, true); - } catch (Exception e) { - logger.error(e.getMessage(), e); - result.setCode(Status.HDFS_OPERATION_ERROR.getCode()); - result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, hdfsFileName)); - return result; - } - putMsg(result, Status.SUCCESS); - return result; - } - + Result updateResourceContent(int resourceId, String content); /** * download file * * @param resourceId resource id * @return resource content - * @throws Exception exception + * @throws IOException exception */ - public org.springframework.core.io.Resource downloadResource(int resourceId) throws Exception { - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - throw new RuntimeException("hdfs not startup"); - } - - Resource resource = resourcesMapper.selectById(resourceId); - if (resource == null) { - logger.error("download file not exist, resource id {}", resourceId); - return null; - } - if (resource.isDirectory()) { - logger.error("resource id {} is directory,can't download it", resourceId); - throw new RuntimeException("cant't download directory"); - } - - int userId = resource.getUserId(); - User user = userMapper.selectById(userId); - if(user == null){ - logger.error("user id {} not exists", userId); - throw new RuntimeException(String.format("resource owner id %d not exist",userId)); - } - - Tenant tenant = 
tenantMapper.queryById(user.getTenantId()); - if(tenant == null){ - logger.error("tenant id {} not exists", user.getTenantId()); - throw new RuntimeException(String.format("The tenant id %d of resource owner not exist",user.getTenantId())); - } - - String tenantCode = tenant.getTenantCode(); - - String hdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName()); - - String localFileName = FileUtils.getDownloadFilename(resource.getAlias()); - logger.info("resource hdfs path is {} ", hdfsFileName); - - HadoopUtils.getInstance().copyHdfsToLocal(hdfsFileName, localFileName, false, true); - return org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName); - } - + org.springframework.core.io.Resource downloadResource(int resourceId) throws IOException; /** * list all file @@ -1155,25 +193,7 @@ public class ResourcesService extends BaseService { * @param userId user id * @return unauthorized result code */ - public Map authorizeResourceTree(User loginUser, Integer userId) { - - Map result = new HashMap<>(); - if (isNotAdmin(loginUser, result)) { - return result; - } - List resourceList = resourcesMapper.queryResourceExceptUserId(userId); - List list; - if (CollectionUtils.isNotEmpty(resourceList)) { - Visitor visitor = new ResourceTreeVisitor(resourceList); - list = visitor.visit().getChildren(); - } else { - list = new ArrayList<>(0); - } - - result.put(Constants.DATA_LIST, list); - putMsg(result, Status.SUCCESS); - return result; - } + Map authorizeResourceTree(User loginUser, Integer userId); /** * unauthorized file @@ -1182,28 +202,7 @@ public class ResourcesService extends BaseService { * @param userId user id * @return unauthorized result code */ - public Map unauthorizedFile(User loginUser, Integer userId) { - - Map result = new HashMap<>(); - if (isNotAdmin(loginUser, result)) { - return result; - } - List resourceList = resourcesMapper.queryResourceExceptUserId(userId); - List list; - if (resourceList != 
null && resourceList.size() > 0) { - Set resourceSet = new HashSet<>(resourceList); - List authedResourceList = resourcesMapper.queryAuthorizedResourceList(userId); - - getAuthorizedResourceList(resourceSet, authedResourceList); - list = new ArrayList<>(resourceSet); - } else { - list = new ArrayList<>(0); - } - Visitor visitor = new ResourceTreeVisitor(list); - result.put(Constants.DATA_LIST, visitor.visit().getChildren()); - putMsg(result, Status.SUCCESS); - return result; - } + Map unauthorizedFile(User loginUser, Integer userId); /** * unauthorized udf function @@ -1212,29 +211,7 @@ public class ResourcesService extends BaseService { * @param userId user id * @return unauthorized result code */ - public Map unauthorizedUDFFunction(User loginUser, Integer userId) { - Map result = new HashMap<>(5); - //only admin can operate - if (isNotAdmin(loginUser, result)) { - return result; - } - - List udfFuncList = udfFunctionMapper.queryUdfFuncExceptUserId(userId); - List resultList = new ArrayList<>(); - Set udfFuncSet = null; - if (CollectionUtils.isNotEmpty(udfFuncList)) { - udfFuncSet = new HashSet<>(udfFuncList); - - List authedUDFFuncList = udfFunctionMapper.queryAuthedUdfFunc(userId); - - getAuthorizedResourceList(udfFuncSet, authedUDFFuncList); - resultList = new ArrayList<>(udfFuncSet); - } - result.put(Constants.DATA_LIST, resultList); - putMsg(result, Status.SUCCESS); - return result; - } - + Map unauthorizedUDFFunction(User loginUser, Integer userId); /** * authorized udf function @@ -1243,17 +220,7 @@ public class ResourcesService extends BaseService { * @param userId user id * @return authorized result code */ - public Map authorizedUDFFunction(User loginUser, Integer userId) { - Map result = new HashMap<>(); - if (isNotAdmin(loginUser, result)) { - return result; - } - List udfFuncs = udfFunctionMapper.queryAuthedUdfFunc(userId); - result.put(Constants.DATA_LIST, udfFuncs); - putMsg(result, Status.SUCCESS); - return result; - } - + Map 
authorizedUDFFunction(User loginUser, Integer userId); /** * authorized file @@ -1262,91 +229,6 @@ public class ResourcesService extends BaseService { * @param userId user id * @return authorized result */ - public Map authorizedFile(User loginUser, Integer userId) { - Map result = new HashMap<>(5); - if (isNotAdmin(loginUser, result)) { - return result; - } - List authedResources = resourcesMapper.queryAuthorizedResourceList(userId); - Visitor visitor = new ResourceTreeVisitor(authedResources); - String visit = JSONUtils.toJsonString(visitor.visit(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); - logger.info(visit); - String jsonTreeStr = JSONUtils.toJsonString(visitor.visit().getChildren(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); - logger.info(jsonTreeStr); - result.put(Constants.DATA_LIST, visitor.visit().getChildren()); - putMsg(result,Status.SUCCESS); - return result; - } - - /** - * get authorized resource list - * - * @param resourceSet resource set - * @param authedResourceList authorized resource list - */ - private void getAuthorizedResourceList(Set resourceSet, List authedResourceList) { - Set authedResourceSet = null; - if (CollectionUtils.isNotEmpty(authedResourceList)) { - authedResourceSet = new HashSet<>(authedResourceList); - resourceSet.removeAll(authedResourceSet); - } - } - - /** - * get tenantCode by UserId - * - * @param userId user id - * @param result return result - * @return - */ - private String getTenantCode(int userId,Result result){ - - User user = userMapper.selectById(userId); - if (user == null) { - logger.error("user {} not exists", userId); - putMsg(result, Status.USER_NOT_EXIST,userId); - return null; - } - - Tenant tenant = tenantMapper.queryById(user.getTenantId()); - if (tenant == null){ - logger.error("tenant not exists"); - putMsg(result, Status.TENANT_NOT_EXIST); - return null; - } - return tenant.getTenantCode(); - } - - /** - * list all children id - * @param resource resource - * @param containSelf whether 
add self to children list - * @return all children id - */ - List listAllChildren(Resource resource,boolean containSelf){ - List childList = new ArrayList<>(); - if (resource.getId() != -1 && containSelf) { - childList.add(resource.getId()); - } - - if(resource.isDirectory()){ - listAllChildren(resource.getId(),childList); - } - return childList; - } - - /** - * list all children id - * @param resourceId resource id - * @param childList child list - */ - void listAllChildren(int resourceId,List childList){ - - List children = resourcesMapper.listChildren(resourceId); - for(int chlidId:children){ - childList.add(chlidId); - listAllChildren(chlidId,childList); - } - } + Map authorizedFile(User loginUser, Integer userId); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java index 55880ad63c..18f3ebf55f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java @@ -17,77 +17,18 @@ package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.dto.ScheduleParam; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.exceptions.ServiceException; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; -import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.enums.WarningType; -import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import 
org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; -import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; -import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; -import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob; -import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; -import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; -import java.text.ParseException; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; import java.util.Map; -import org.quartz.CronExpression; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; - /** * scheduler service */ -@Service -public class SchedulerService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(SchedulerService.class); - - @Autowired - private ProjectService projectService; - - @Autowired - private ExecutorService executorService; - - @Autowired - private MonitorService monitorService; - - @Autowired - private ProcessService processService; - - @Autowired - private ScheduleMapper scheduleMapper; - - @Autowired - private ProjectMapper projectMapper; - - @Autowired - private ProcessDefinitionMapper processDefinitionMapper; +public interface SchedulerService 
{ /** * save schedule @@ -103,80 +44,14 @@ public class SchedulerService extends BaseService { * @param workerGroup worker group * @return create result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Map insertSchedule(User loginUser, String projectName, - Integer processDefineId, - String schedule, - WarningType warningType, - int warningGroupId, - FailureStrategy failureStrategy, - Priority processInstancePriority, - String workerGroup) { - - Map result = new HashMap(); - - Project project = projectMapper.queryByName(projectName); - - // check project auth - boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); - if (!hasProjectAndPerm) { - return result; - } - - // check work flow define release state - ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId); - result = executorService.checkProcessDefinitionValid(processDefinition, processDefineId); - if (result.get(Constants.STATUS) != Status.SUCCESS) { - return result; - } - - Schedule scheduleObj = new Schedule(); - Date now = new Date(); - - scheduleObj.setProjectName(projectName); - scheduleObj.setProcessDefinitionId(processDefinition.getId()); - scheduleObj.setProcessDefinitionName(processDefinition.getName()); - - ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); - if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { - logger.warn("The start time must not be the same as the end"); - putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); - return result; - } - scheduleObj.setStartTime(scheduleParam.getStartTime()); - scheduleObj.setEndTime(scheduleParam.getEndTime()); - if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { - logger.error(scheduleParam.getCrontab() + " verify failure"); - - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab()); - return result; - } - 
scheduleObj.setCrontab(scheduleParam.getCrontab()); - scheduleObj.setWarningType(warningType); - scheduleObj.setWarningGroupId(warningGroupId); - scheduleObj.setFailureStrategy(failureStrategy); - scheduleObj.setCreateTime(now); - scheduleObj.setUpdateTime(now); - scheduleObj.setUserId(loginUser.getId()); - scheduleObj.setUserName(loginUser.getUserName()); - scheduleObj.setReleaseState(ReleaseState.OFFLINE); - scheduleObj.setProcessInstancePriority(processInstancePriority); - scheduleObj.setWorkerGroup(workerGroup); - scheduleMapper.insert(scheduleObj); - - /** - * updateProcessInstance receivers and cc by process definition id - */ - processDefinition.setWarningGroupId(warningGroupId); - processDefinitionMapper.updateById(processDefinition); - - // return scheduler object with ID - result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId())); - putMsg(result, Status.SUCCESS); - - result.put("scheduleId", scheduleObj.getId()); - return result; - } + Map insertSchedule(User loginUser, String projectName, + Integer processDefineId, + String schedule, + WarningType warningType, + int warningGroupId, + FailureStrategy failureStrategy, + Priority processInstancePriority, + String workerGroup); /** * updateProcessInstance schedule @@ -193,95 +68,16 @@ public class SchedulerService extends BaseService { * @param scheduleStatus schedule status * @return update result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Map updateSchedule(User loginUser, - String projectName, - Integer id, - String scheduleExpression, - WarningType warningType, - int warningGroupId, - FailureStrategy failureStrategy, - ReleaseState scheduleStatus, - Priority processInstancePriority, - String workerGroup) { - Map result = new HashMap(5); - - Project project = projectMapper.queryByName(projectName); - - // check project auth - boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); - if (!hasProjectAndPerm) { - return 
result; - } - - // check schedule exists - Schedule schedule = scheduleMapper.selectById(id); - - if (schedule == null) { - putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); - return result; - } - - ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId()); - if (processDefinition == null) { - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId()); - return result; - } - - /** - * scheduling on-line status forbid modification - */ - if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) { - return result; - } - - Date now = new Date(); - - // updateProcessInstance param - if (StringUtils.isNotEmpty(scheduleExpression)) { - ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); - if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { - logger.warn("The start time must not be the same as the end"); - putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); - return result; - } - schedule.setStartTime(scheduleParam.getStartTime()); - schedule.setEndTime(scheduleParam.getEndTime()); - if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { - putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab()); - return result; - } - schedule.setCrontab(scheduleParam.getCrontab()); - } - - if (warningType != null) { - schedule.setWarningType(warningType); - } - - schedule.setWarningGroupId(warningGroupId); - - if (failureStrategy != null) { - schedule.setFailureStrategy(failureStrategy); - } - - if (scheduleStatus != null) { - schedule.setReleaseState(scheduleStatus); - } - schedule.setWorkerGroup(workerGroup); - schedule.setUpdateTime(now); - schedule.setProcessInstancePriority(processInstancePriority); - scheduleMapper.updateById(schedule); - - /** - * updateProcessInstance recipients and cc by process definition ID - 
*/ - processDefinition.setWarningGroupId(warningGroupId); - - processDefinitionMapper.updateById(processDefinition); - - putMsg(result, Status.SUCCESS); - return result; - } + Map updateSchedule(User loginUser, + String projectName, + Integer id, + String scheduleExpression, + WarningType warningType, + int warningGroupId, + FailureStrategy failureStrategy, + ReleaseState scheduleStatus, + Priority processInstancePriority, + String workerGroup); /** @@ -293,110 +89,10 @@ public class SchedulerService extends BaseService { * @param scheduleStatus schedule status * @return publish result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Map setScheduleState(User loginUser, - String projectName, - Integer id, - ReleaseState scheduleStatus) { - - Map result = new HashMap(5); - - Project project = projectMapper.queryByName(projectName); - // check project auth - boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); - if (!hasProjectAndPerm) { - return result; - } - - // check schedule exists - Schedule scheduleObj = scheduleMapper.selectById(id); - - if (scheduleObj == null) { - putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); - return result; - } - // check schedule release state - if (scheduleObj.getReleaseState() == scheduleStatus) { - logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}", - scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus); - putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus); - return result; - } - ProcessDefinition processDefinition = processService.findProcessDefineById(scheduleObj.getProcessDefinitionId()); - if (processDefinition == null) { - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId()); - return result; - } - - if (scheduleStatus == ReleaseState.ONLINE) { - // check process definition release state - if 
(processDefinition.getReleaseState() != ReleaseState.ONLINE) { - logger.info("not release process definition id: {} , name : {}", - processDefinition.getId(), processDefinition.getName()); - putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); - return result; - } - // check sub process definition release state - List subProcessDefineIds = new ArrayList<>(); - processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); - Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]); - if (subProcessDefineIds.size() > 0) { - List subProcessDefinitionList = - processDefinitionMapper.queryDefinitionListByIdList(idArray); - if (subProcessDefinitionList != null && subProcessDefinitionList.size() > 0) { - for (ProcessDefinition subProcessDefinition : subProcessDefinitionList) { - /** - * if there is no online process, exit directly - */ - if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) { - logger.info("not release process definition id: {} , name : {}", - subProcessDefinition.getId(), subProcessDefinition.getName()); - putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId()); - return result; - } - } - } - } - } - - // check master server exists - List masterServers = monitorService.getServerListFromZK(true); - - if (masterServers.size() == 0) { - putMsg(result, Status.MASTER_NOT_EXISTS); - return result; - } - - // set status - scheduleObj.setReleaseState(scheduleStatus); - - scheduleMapper.updateById(scheduleObj); - - try { - switch (scheduleStatus) { - case ONLINE: { - logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers); - setSchedule(project.getId(), scheduleObj); - break; - } - case OFFLINE: { - logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), 
masterServers); - deleteSchedule(project.getId(), id); - break; - } - default: { - putMsg(result, Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString()); - return result; - } - } - } catch (Exception e) { - result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? "set online failure" : "set offline failure"); - throw new ServiceException(result.get(Constants.MSG).toString()); - } - - putMsg(result, Status.SUCCESS); - return result; - } + Map setScheduleState(User loginUser, + String projectName, + Integer id, + ReleaseState scheduleStatus); /** * query schedule @@ -409,36 +105,7 @@ public class SchedulerService extends BaseService { * @param searchVal search value * @return schedule list page */ - public Map querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize) { - - HashMap result = new HashMap<>(); - - Project project = projectMapper.queryByName(projectName); - - // check project auth - boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); - if (!hasProjectAndPerm) { - return result; - } - - ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId); - if (processDefinition == null) { - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); - return result; - } - Page page = new Page(pageNo, pageSize); - IPage scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging( - page, processDefineId, searchVal - ); - - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - pageInfo.setTotalCount((int) scheduleIPage.getTotal()); - pageInfo.setLists(scheduleIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } + Map querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize); /** * query schedule list @@ -447,41 +114,7 @@ public class SchedulerService extends BaseService { * 
@param projectName project name * @return schedule list */ - public Map queryScheduleList(User loginUser, String projectName) { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - // check project auth - boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); - if (!hasProjectAndPerm) { - return result; - } - - List schedules = scheduleMapper.querySchedulerListByProjectName(projectName); - - result.put(Constants.DATA_LIST, schedules); - putMsg(result, Status.SUCCESS); - - return result; - } - - public void setSchedule(int projectId, Schedule schedule) { - - int scheduleId = schedule.getId(); - logger.info("set schedule, project id: {}, scheduleId: {}", projectId, scheduleId); - - Date startDate = schedule.getStartTime(); - Date endDate = schedule.getEndTime(); - - String jobName = QuartzExecutors.buildJobName(scheduleId); - String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); - - Map dataMap = QuartzExecutors.buildDataMap(projectId, scheduleId, schedule); - - QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, jobName, jobGroupName, startDate, endDate, - schedule.getCrontab(), dataMap); - - } + Map queryScheduleList(User loginUser, String projectName); /** * delete schedule @@ -490,35 +123,7 @@ public class SchedulerService extends BaseService { * @param scheduleId schedule id * @throws RuntimeException runtime exception */ - public static void deleteSchedule(int projectId, int scheduleId) { - logger.info("delete schedules of project id:{}, schedule id:{}", projectId, scheduleId); - - String jobName = QuartzExecutors.buildJobName(scheduleId); - String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); - - if (!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)) { - logger.warn("set offline failure:projectId:{},scheduleId:{}", projectId, scheduleId); - throw new ServiceException("set offline failure"); - } - - } - - /** - * check valid - * - 
* @param result result - * @param bool bool - * @param status status - * @return check result code - */ - private boolean checkValid(Map result, boolean bool, Status status) { - // timeout is valid - if (bool) { - putMsg(result, status); - return true; - } - return false; - } + void deleteSchedule(int projectId, int scheduleId); /** * delete schedule by id @@ -528,46 +133,7 @@ public class SchedulerService extends BaseService { * @param scheduleId scheule id * @return delete result code */ - public Map deleteScheduleById(User loginUser, String projectName, Integer scheduleId) { - - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - Schedule schedule = scheduleMapper.selectById(scheduleId); - - if (schedule == null) { - putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId); - return result; - } - - // Determine if the login user is the owner of the schedule - if (loginUser.getId() != schedule.getUserId() - && loginUser.getUserType() != UserType.ADMIN_USER) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - // check schedule is already online - if (schedule.getReleaseState() == ReleaseState.ONLINE) { - putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId()); - return result; - } - - int delete = scheduleMapper.deleteById(scheduleId); - - if (delete > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR); - } - return result; - } + Map deleteScheduleById(User loginUser, String projectName, Integer scheduleId); /** * preview schedule @@ -577,24 +143,5 @@ public class SchedulerService extends BaseService { * @param schedule schedule expression * @return the next five fire time */ - public Map previewSchedule(User 
loginUser, String projectName, String schedule) { - Map result = new HashMap<>(); - CronExpression cronExpression; - ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); - Date now = new Date(); - - Date startTime = now.after(scheduleParam.getStartTime()) ? now : scheduleParam.getStartTime(); - Date endTime = scheduleParam.getEndTime(); - try { - cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab()); - } catch (ParseException e) { - logger.error(e.getMessage(), e); - putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR); - return result; - } - List selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime, cronExpression, Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT); - result.put(Constants.DATA_LIST, selfFireDateList.stream().map(t -> DateUtils.dateToString(t))); - putMsg(result, Status.SUCCESS); - return result; - } + Map previewSchedule(User loginUser, String projectName, String schedule); } \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java index dc911f51e3..33700d4a8e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java @@ -14,13 +14,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.service; -import javax.servlet.http.HttpServletRequest; +package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.dao.entity.Session; import org.apache.dolphinscheduler.dao.entity.User; +import javax.servlet.http.HttpServletRequest; + /** * session service */ diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java index 6c68202313..cbbc89bde0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java @@ -17,57 +17,15 @@ package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; -import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; -import org.apache.dolphinscheduler.service.process.ProcessService; -import java.text.MessageFormat; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; import java.util.Map; -import java.util.Set; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import com.baomidou.mybatisplus.core.metadata.IPage; 
-import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * task instance service */ -@Service -public class TaskInstanceService extends BaseService { - - @Autowired - ProjectMapper projectMapper; - - @Autowired - ProjectService projectService; - - @Autowired - ProcessService processService; - - @Autowired - TaskInstanceMapper taskInstanceMapper; - - @Autowired - ProcessInstanceService processInstanceService; - - @Autowired - UsersService usersService; +public interface TaskInstanceService { /** * query task list by project, process instance, task name, task start time, task end time, task status, keyword paging @@ -85,65 +43,10 @@ public class TaskInstanceService extends BaseService { * @param pageSize page size * @return task list page */ - public Map queryTaskListPaging(User loginUser, String projectName, - Integer processInstanceId, String processInstanceName, String taskName, String executorName, String startDate, - String endDate, String searchVal, ExecutionStatus stateType, String host, - Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status status = (Status) checkResult.get(Constants.STATUS); - if (status != Status.SUCCESS) { - return checkResult; - } - - int[] statusArray = null; - if (stateType != null) { - statusArray = new int[]{stateType.ordinal()}; - } - - Date start = null; - Date end = null; - if (StringUtils.isNotEmpty(startDate)) { - start = DateUtils.getScheduleDate(startDate); - if (start == null) { - return generateInvalidParamRes(result, "startDate"); - } - } - if (StringUtils.isNotEmpty(endDate)) { - end = DateUtils.getScheduleDate(endDate); - if (end == null) { - return generateInvalidParamRes(result, "endDate"); - } - } - - Page page = new Page(pageNo, pageSize); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - int executorId = 
usersService.getUserIdByName(executorName); - - IPage taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging( - page, project.getId(), processInstanceId, processInstanceName, searchVal, taskName, executorId, statusArray, host, start, end - ); - Set exclusionSet = new HashSet<>(); - exclusionSet.add(Constants.CLASS); - exclusionSet.add("taskJson"); - List taskInstanceList = taskInstanceIPage.getRecords(); - - for (TaskInstance taskInstance : taskInstanceList) { - taskInstance.setDuration(DateUtils.format2Duration(taskInstance.getStartTime(), taskInstance.getEndTime())); - User executor = usersService.queryUser(taskInstance.getExecutorId()); - if (null != executor) { - taskInstance.setExecutorName(executor.getUserName()); - } - } - pageInfo.setTotalCount((int) taskInstanceIPage.getTotal()); - pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(), exclusionSet)); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryTaskListPaging(User loginUser, String projectName, + Integer processInstanceId, String processInstanceName, String taskName, String executorName, String startDate, + String endDate, String searchVal, ExecutionStatus stateType, String host, + Integer pageNo, Integer pageSize); /** * change one task instance's state from failure to forced success @@ -153,51 +56,6 @@ public class TaskInstanceService extends BaseService { * @param taskInstanceId task instance id * @return the result code and msg */ - public Map forceTaskSuccess(User loginUser, String projectName, Integer taskInstanceId) { - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - // check user auth - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status status = (Status) checkResult.get(Constants.STATUS); - if (status != Status.SUCCESS) { - return checkResult; - } + Map forceTaskSuccess(User loginUser, String 
projectName, Integer taskInstanceId); - // check whether the task instance can be found - TaskInstance task = taskInstanceMapper.selectById(taskInstanceId); - if (task == null) { - putMsg(result, Status.TASK_INSTANCE_NOT_FOUND); - return result; - } - - // check whether the task instance state type is failure - if (!task.getState().typeIsFailure()) { - putMsg(result, Status.TASK_INSTANCE_STATE_OPERATION_ERROR, taskInstanceId, task.getState().toString()); - return result; - } - - // change the state of the task instance - task.setState(ExecutionStatus.FORCED_SUCCESS); - int changedNum = taskInstanceMapper.updateById(task); - if (changedNum > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.FORCE_TASK_SUCCESS_ERROR); - } - - return result; - } - - /*** - * generate {@link org.apache.dolphinscheduler.api.enums.Status#REQUEST_PARAMS_NOT_VALID_ERROR} res with param name - * @param result exist result map - * @param params invalid params name - * @return update result map - */ - private Map generateInvalidParamRes(Map result, String params) { - result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), params)); - return result; - } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java index 54eba5c2d6..8c8ad0abff 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java @@ -14,26 +14,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.dao.TaskRecordDao; -import org.apache.dolphinscheduler.dao.entity.TaskRecord; -import org.springframework.stereotype.Service; +package org.apache.dolphinscheduler.api.service; -import java.util.HashMap; -import java.util.List; import java.util.Map; -import static org.apache.dolphinscheduler.common.Constants.*; - /** * task record service */ -@Service -public class TaskRecordService extends BaseService{ +public interface TaskRecordService { /** * query task record list paging @@ -50,33 +39,8 @@ public class TaskRecordService extends BaseService{ * @param isHistory is history * @return task record list */ - public Map queryTaskRecordListPaging(boolean isHistory, String taskName, String startDate, - String taskDate, String sourceTable, - String destTable, String endDate, - String state, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(10); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - - Map map = new HashMap<>(10); - map.put("taskName", taskName); - map.put("taskDate", taskDate); - map.put("state", state); - map.put("sourceTable", sourceTable); - map.put("targetTable", destTable); - map.put("startTime", startDate); - map.put("endTime", endDate); - map.put("offset", pageInfo.getStart().toString()); - map.put("pageSize", pageInfo.getPageSize().toString()); - - String table = isHistory ? 
TASK_RECORD_TABLE_HISTORY_HIVE_LOG : TASK_RECORD_TABLE_HIVE_LOG; - int count = TaskRecordDao.countTaskRecord(map, table); - List recordList = TaskRecordDao.queryAllTaskRecord(map, table); - pageInfo.setTotalCount(count); - pageInfo.setLists(recordList); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - - } + Map queryTaskRecordListPaging(boolean isHistory, String taskName, String startDate, + String taskDate, String sourceTable, + String destTable, String endDate, + String state, Integer pageNo, Integer pageSize); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java index cd962fdc70..ac88d739c5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java @@ -14,51 +14,19 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UdfType; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.Resource; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; -import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; -import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; -import java.util.Date; -import java.util.HashMap; -import java.util.List; import java.util.Map; /** - * udf function service + * udf func service */ -@Service -public class UdfFuncService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(UdfFuncService.class); - - @Autowired - private ResourceMapper resourceMapper; - - @Autowired - private UdfFuncMapper udfFuncMapper; - - @Autowired - private UDFUserMapper udfUserMapper; - +public interface UdfFuncService { /** * create udf function @@ -73,72 +41,14 @@ public class UdfFuncService extends BaseService{ * @param className class name * @return create result code */ - public Result createUdfFunction(User loginUser, - String funcName, - String className, - String argTypes, - String database, - String desc, - UdfType type, - int resourceId) { 
- Result result = new Result(); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - // verify udf func name exist - if (checkUdfFuncNameExists(funcName)) { - logger.error("udf func {} has exist, can't recreate", funcName); - putMsg(result, Status.UDF_FUNCTION_EXISTS); - return result; - } - - Resource resource = resourceMapper.selectById(resourceId); - if (resource == null) { - logger.error("resourceId {} is not exist", resourceId); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - //save data - UdfFunc udf = new UdfFunc(); - Date now = new Date(); - udf.setUserId(loginUser.getId()); - udf.setFuncName(funcName); - udf.setClassName(className); - if (StringUtils.isNotEmpty(argTypes)) { - udf.setArgTypes(argTypes); - } - if (StringUtils.isNotEmpty(database)) { - udf.setDatabase(database); - } - udf.setDescription(desc); - udf.setResourceId(resourceId); - udf.setResourceName(resource.getFullName()); - udf.setType(type); - - udf.setCreateTime(now); - udf.setUpdateTime(now); - - udfFuncMapper.insert(udf); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * - * @param name name - * @return check result code - */ - private boolean checkUdfFuncNameExists(String name){ - List resource = udfFuncMapper.queryUdfByIdStr(null, name); - return resource != null && resource.size() > 0; - } - + Result createUdfFunction(User loginUser, + String funcName, + String className, + String argTypes, + String database, + String desc, + UdfType type, + int resourceId); /** * query udf function @@ -146,18 +56,7 @@ public class UdfFuncService extends BaseService{ * @param id udf function id * @return udf function detail */ - public Map queryUdfFuncDetail(int id) { - - Map result = new HashMap<>(5); - UdfFunc udfFunc = udfFuncMapper.selectById(id); - if (udfFunc == null) 
{ - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - result.put(Constants.DATA_LIST, udfFunc); - putMsg(result, Status.SUCCESS); - return result; - } + Map queryUdfFuncDetail(int id); /** * updateProcessInstance udf function @@ -172,67 +71,14 @@ public class UdfFuncService extends BaseService{ * @param className class name * @return update result code */ - public Map updateUdfFunc(int udfFuncId, - String funcName, - String className, - String argTypes, - String database, - String desc, - UdfType type, - int resourceId) { - Map result = new HashMap<>(); - // verify udfFunc is exist - UdfFunc udf = udfFuncMapper.selectUdfById(udfFuncId); - - if (udf == null) { - result.put(Constants.STATUS, Status.UDF_FUNCTION_NOT_EXIST); - result.put(Constants.MSG, Status.UDF_FUNCTION_NOT_EXIST.getMsg()); - return result; - } - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - // verify udfFuncName is exist - if (!funcName.equals(udf.getFuncName())) { - if (checkUdfFuncNameExists(funcName)) { - logger.error("UdfFunc {} has exist, can't create again.", funcName); - result.put(Constants.STATUS, Status.UDF_FUNCTION_EXISTS); - result.put(Constants.MSG, Status.UDF_FUNCTION_EXISTS.getMsg()); - return result; - } - } - - Resource resource = resourceMapper.selectById(resourceId); - if (resource == null) { - logger.error("resourceId {} is not exist", resourceId); - result.put(Constants.STATUS, Status.RESOURCE_NOT_EXIST); - result.put(Constants.MSG, Status.RESOURCE_NOT_EXIST.getMsg()); - return result; - } - Date now = new Date(); - udf.setFuncName(funcName); - udf.setClassName(className); - udf.setArgTypes(argTypes); - if (StringUtils.isNotEmpty(database)) { - udf.setDatabase(database); - } - udf.setDescription(desc); - udf.setResourceId(resourceId); - 
udf.setResourceName(resource.getFullName()); - udf.setType(type); - - udf.setUpdateTime(now); - - udfFuncMapper.updateById(udf); - putMsg(result, Status.SUCCESS); - return result; - } - + Map updateUdfFunc(int udfFuncId, + String funcName, + String className, + String argTypes, + String database, + String desc, + UdfType type, + int resourceId); /** * query udf function list paging @@ -243,37 +89,7 @@ public class UdfFuncService extends BaseService{ * @param searchVal search value * @return udf function list page */ - public Map queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); - - - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - IPage udfFuncList = getUdfFuncsPage(loginUser, searchVal, pageSize, pageNo); - pageInfo.setTotalCount((int)udfFuncList.getTotal()); - pageInfo.setLists(udfFuncList.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * get udf functions - * - * @param loginUser login user - * @param searchVal search value - * @param pageSize page size - * @param pageNo page number - * @return udf function list page - */ - private IPage getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) { - - int userId = loginUser.getId(); - if (isAdmin(loginUser)) { - userId = 0; - } - Page page = new Page(pageNo, pageSize); - return udfFuncMapper.queryUdfFuncPaging(page, userId, searchVal); - } + Map queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize); /** * query udf list @@ -282,18 +98,7 @@ public class UdfFuncService extends BaseService{ * @param type udf type * @return udf func list */ - public Map queryUdfFuncList(User loginUser, Integer type) { - Map result = new HashMap<>(5); - int userId = loginUser.getId(); - if (isAdmin(loginUser)) { - userId = 0; - } - List udfFuncList = udfFuncMapper.getUdfFuncByType(userId, type); - - 
result.put(Constants.DATA_LIST, udfFuncList); - putMsg(result, Status.SUCCESS); - return result; - } + Map queryUdfFuncList(User loginUser, Integer type); /** * delete udf function @@ -301,15 +106,7 @@ public class UdfFuncService extends BaseService{ * @param id udf function id * @return delete result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Result delete(int id) { - Result result = new Result(); - - udfFuncMapper.deleteById(id); - udfUserMapper.deleteByUdfFuncId(id); - putMsg(result, Status.SUCCESS); - return result; - } + Result delete(int id); /** * verify udf function by name @@ -317,16 +114,6 @@ public class UdfFuncService extends BaseService{ * @param name name * @return true if the name can user, otherwise return false */ - public Result verifyUdfFuncByName(String name) { - Result result = new Result(); - if (checkUdfFuncNameExists(name)) { - logger.error("UDF function name:{} has exist, can't create again.", name); - putMsg(result, Status.UDF_FUNCTION_EXISTS); - } else { - putMsg(result, Status.SUCCESS); - } - - return result; - } + Result verifyUdfFuncByName(String name); } \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UiPluginService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UiPluginService.java index 102d927673..325f1672c4 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UiPluginService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UiPluginService.java @@ -22,7 +22,7 @@ import org.apache.dolphinscheduler.common.enums.PluginType; import java.util.Map; /** - * UiPluginService + * ui plugin service */ public interface UiPluginService { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java index 
3fb5f64346..ff73e52c42 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java @@ -17,96 +17,18 @@ package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; -import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.exceptions.ServiceException; -import org.apache.dolphinscheduler.api.utils.CheckUtils; -import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.Flag; -import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.EncryptionUtils; -import org.apache.dolphinscheduler.common.utils.HadoopUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.AlertGroup; -import org.apache.dolphinscheduler.dao.entity.DatasourceUser; -import org.apache.dolphinscheduler.dao.entity.ProjectUser; -import org.apache.dolphinscheduler.dao.entity.Resource; -import org.apache.dolphinscheduler.dao.entity.ResourcesUser; -import org.apache.dolphinscheduler.dao.entity.Tenant; -import org.apache.dolphinscheduler.dao.entity.UDFUser; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; -import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; -import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; -import 
org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; -import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; -import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; -import org.apache.dolphinscheduler.dao.mapper.TenantMapper; -import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; -import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; import java.io.IOException; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** - * user service + * users service */ -@Service -public class UsersService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(UsersService.class); - - @Autowired - private UserMapper userMapper; - - @Autowired - private TenantMapper tenantMapper; - - @Autowired - private ProjectUserMapper projectUserMapper; - - @Autowired - private ResourceUserMapper resourcesUserMapper; - - @Autowired - private ResourceMapper resourceMapper; - - @Autowired - private DataSourceUserMapper datasourceUserMapper; - - @Autowired - private UDFUserMapper udfUserMapper; - - @Autowired - private AlertGroupMapper alertGroupMapper; - - @Autowired - private ProcessDefinitionMapper processDefinitionMapper; - +public interface UsersService { /** * create user, only system admin have permission @@ -121,104 +43,16 @@ public class UsersService extends BaseService { * @return create result code * 
@throws Exception exception */ - @Transactional(rollbackFor = Exception.class) - public Map createUser(User loginUser, - String userName, - String userPassword, - String email, - int tenantId, - String phone, - String queue, - int state) throws Exception { - - Map result = new HashMap<>(5); - - //check all user params - String msg = this.checkUserParams(userName, userPassword, email, phone); - - if (!StringUtils.isEmpty(msg)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, msg); - return result; - } - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - if (!checkTenantExists(tenantId)) { - putMsg(result, Status.TENANT_NOT_EXIST); - return result; - } - - User user = createUser(userName, userPassword, email, tenantId, phone, queue, state); + Map createUser(User loginUser, String userName, String userPassword, String email, + int tenantId, String phone, String queue, int state) throws IOException; - Tenant tenant = tenantMapper.queryById(tenantId); - // resource upload startup - if (PropertyUtils.getResUploadStartupState()) { - // if tenant not exists - if (!HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(tenant.getTenantCode()))) { - createTenantDirIfNotExists(tenant.getTenantCode()); - } - String userPath = HadoopUtils.getHdfsUserDir(tenant.getTenantCode(), user.getId()); - HadoopUtils.getInstance().mkdir(userPath); - } - - putMsg(result, Status.SUCCESS); - return result; - - } - - @Transactional(rollbackFor = RuntimeException.class) - public User createUser(String userName, - String userPassword, - String email, - int tenantId, - String phone, - String queue, - int state) { - User user = new User(); - Date now = new Date(); - - user.setUserName(userName); - user.setUserPassword(EncryptionUtils.getMd5(userPassword)); - user.setEmail(email); - user.setTenantId(tenantId); - user.setPhone(phone); - user.setState(state); - // create general users, administrator users are currently built-in - 
user.setUserType(UserType.GENERAL_USER); - user.setCreateTime(now); - user.setUpdateTime(now); - if (StringUtils.isEmpty(queue)) { - queue = ""; - } - user.setQueue(queue); - - // save user - userMapper.insert(user); - return user; - } + User createUser(String userName, String userPassword, String email, + int tenantId, String phone, String queue, int state); /*** * create User for ldap login */ - @Transactional(rollbackFor = Exception.class) - public User createUser(UserType userType, String userId, String email) { - User user = new User(); - Date now = new Date(); - - user.setUserName(userId); - user.setEmail(email); - // create general users, administrator users are currently built-in - user.setUserType(userType); - user.setCreateTime(now); - user.setUpdateTime(now); - user.setQueue(""); - - // save user - userMapper.insert(user); - return user; - } + User createUser(UserType userType, String userId, String email); /** * get user by user name @@ -226,9 +60,7 @@ public class UsersService extends BaseService { * @param userName user name * @return exist user or null */ - public User getUserByUserName(String userName) { - return userMapper.queryByUserNameAccurately(userName); - } + User getUserByUserName(String userName); /** * query user by id @@ -236,9 +68,7 @@ public class UsersService extends BaseService { * @param id id * @return user info */ - public User queryUser(int id) { - return userMapper.selectById(id); - } + User queryUser(int id); /** * query user @@ -246,9 +76,7 @@ public class UsersService extends BaseService { * @param name name * @return user info */ - public User queryUser(String name) { - return userMapper.queryByUserNameAccurately(name); - } + User queryUser(String name); /** * query user @@ -257,10 +85,7 @@ public class UsersService extends BaseService { * @param password password * @return user info */ - public User queryUser(String name, String password) { - String md5 = EncryptionUtils.getMd5(password); - return 
userMapper.queryUserByNamePassword(name, md5); - } + User queryUser(String name, String password); /** * get user id by user name @@ -268,20 +93,7 @@ public class UsersService extends BaseService { * @param name user name * @return if name empty 0, user not exists -1, user exist user id */ - public int getUserIdByName(String name) { - //executor name query - int executorId = 0; - if (StringUtils.isNotEmpty(name)) { - User executor = queryUser(name); - if (null != executor) { - executorId = executor.getId(); - } else { - executorId = -1; - } - } - - return executorId; - } + int getUserIdByName(String name); /** * query user list @@ -292,25 +104,7 @@ public class UsersService extends BaseService { * @param pageSize page size * @return user list page */ - public Map queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); - - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - return result; - } - - Page page = new Page(pageNo, pageSize); - - IPage scheduleList = userMapper.queryUserPaging(page, searchVal); - - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotalCount((int) scheduleList.getTotal()); - pageInfo.setLists(scheduleList.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize); /** * updateProcessInstance user @@ -327,128 +121,8 @@ public class UsersService extends BaseService { * @return update result code * @throws Exception exception */ - public Map updateUser(User loginUser, int userId, - String userName, - String userPassword, - String email, - int tenantId, - String phone, - String queue, - int state) throws Exception { - Map result = new HashMap<>(5); - result.put(Constants.STATUS, false); - - if (check(result, !hasPerm(loginUser, userId), Status.USER_NO_OPERATION_PERM)) { - return result; - } - User 
user = userMapper.selectById(userId); - if (user == null) { - putMsg(result, Status.USER_NOT_EXIST, userId); - return result; - } - if (StringUtils.isNotEmpty(userName)) { - - if (!CheckUtils.checkUserName(userName)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); - return result; - } - - User tempUser = userMapper.queryByUserNameAccurately(userName); - if (tempUser != null && tempUser.getId() != userId) { - putMsg(result, Status.USER_NAME_EXIST); - return result; - } - user.setUserName(userName); - } - - if (StringUtils.isNotEmpty(userPassword)) { - if (!CheckUtils.checkPassword(userPassword)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userPassword); - return result; - } - user.setUserPassword(EncryptionUtils.getMd5(userPassword)); - } - - if (StringUtils.isNotEmpty(email)) { - if (!CheckUtils.checkEmail(email)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, email); - return result; - } - user.setEmail(email); - } - - if (StringUtils.isNotEmpty(phone) && !CheckUtils.checkPhone(phone)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, phone); - return result; - } - user.setPhone(phone); - user.setQueue(queue); - user.setState(state); - Date now = new Date(); - user.setUpdateTime(now); - - //if user switches the tenant, the user's resources need to be copied to the new tenant - if (user.getTenantId() != tenantId) { - Tenant oldTenant = tenantMapper.queryById(user.getTenantId()); - //query tenant - Tenant newTenant = tenantMapper.queryById(tenantId); - if (newTenant != null) { - // if hdfs startup - if (PropertyUtils.getResUploadStartupState() && oldTenant != null) { - String newTenantCode = newTenant.getTenantCode(); - String oldResourcePath = HadoopUtils.getHdfsResDir(oldTenant.getTenantCode()); - String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode()); - - // if old tenant dir exists - if (HadoopUtils.getInstance().exists(oldResourcePath)) { - String newResourcePath = 
HadoopUtils.getHdfsResDir(newTenantCode); - String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode); - - //file resources list - List fileResourcesList = resourceMapper.queryResourceList( - null, userId, ResourceType.FILE.ordinal()); - if (CollectionUtils.isNotEmpty(fileResourcesList)) { - ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(fileResourcesList); - ResourceComponent resourceComponent = resourceTreeVisitor.visit(); - copyResourceFiles(resourceComponent, oldResourcePath, newResourcePath); - } - - //udf resources - List udfResourceList = resourceMapper.queryResourceList( - null, userId, ResourceType.UDF.ordinal()); - if (CollectionUtils.isNotEmpty(udfResourceList)) { - ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(udfResourceList); - ResourceComponent resourceComponent = resourceTreeVisitor.visit(); - copyResourceFiles(resourceComponent, oldUdfsPath, newUdfsPath); - } - - //Delete the user from the old tenant directory - String oldUserPath = HadoopUtils.getHdfsUserDir(oldTenant.getTenantCode(), userId); - HadoopUtils.getInstance().delete(oldUserPath, true); - } else { - // if old tenant dir not exists , create - createTenantDirIfNotExists(oldTenant.getTenantCode()); - } - - if (HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(newTenant.getTenantCode()))) { - //create user in the new tenant directory - String newUserPath = HadoopUtils.getHdfsUserDir(newTenant.getTenantCode(), user.getId()); - HadoopUtils.getInstance().mkdir(newUserPath); - } else { - // if new tenant dir not exists , create - createTenantDirIfNotExists(newTenant.getTenantCode()); - } - - } - } - user.setTenantId(tenantId); - } - - // updateProcessInstance user - userMapper.updateById(user); - putMsg(result, Status.SUCCESS); - return result; - } + Map updateUser(User loginUser, int userId, String userName, String userPassword, String email, + int tenantId, String phone, String queue, int state) throws IOException; /** * delete user @@ 
-458,36 +132,7 @@ public class UsersService extends BaseService { * @return delete result code * @throws Exception exception when operate hdfs */ - public Map deleteUserById(User loginUser, int id) throws Exception { - Map result = new HashMap<>(5); - //only admin can operate - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM, id); - return result; - } - //check exist - User tempUser = userMapper.selectById(id); - if (tempUser == null) { - putMsg(result, Status.USER_NOT_EXIST, id); - return result; - } - // delete user - User user = userMapper.queryTenantCodeByUserId(id); - - if (user != null) { - if (PropertyUtils.getResUploadStartupState()) { - String userPath = HadoopUtils.getHdfsUserDir(user.getTenantCode(), id); - if (HadoopUtils.getInstance().exists(userPath)) { - HadoopUtils.getInstance().delete(userPath, true); - } - } - } - - userMapper.deleteById(id); - putMsg(result, Status.SUCCESS); - - return result; - } + Map deleteUserById(User loginUser, int id) throws IOException; /** * grant project @@ -497,46 +142,7 @@ public class UsersService extends BaseService { * @param projectIds project id array * @return grant result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Map grantProject(User loginUser, int userId, String projectIds) { - Map result = new HashMap<>(5); - result.put(Constants.STATUS, false); - - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - return result; - } - - //check exist - User tempUser = userMapper.selectById(userId); - if (tempUser == null) { - putMsg(result, Status.USER_NOT_EXIST, userId); - return result; - } - //if the selected projectIds are empty, delete all items associated with the user - projectUserMapper.deleteProjectRelation(0, userId); - - if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS)) { - return result; - } - - String[] projectIdArr = projectIds.split(","); - - for (String projectId : projectIdArr) { - 
Date now = new Date(); - ProjectUser projectUser = new ProjectUser(); - projectUser.setUserId(userId); - projectUser.setProjectId(Integer.parseInt(projectId)); - projectUser.setPerm(7); - projectUser.setCreateTime(now); - projectUser.setUpdateTime(now); - projectUserMapper.insert(projectUser); - } - - putMsg(result, Status.SUCCESS); - - return result; - } + Map grantProject(User loginUser, int userId, String projectIds); /** @@ -547,93 +153,7 @@ public class UsersService extends BaseService { * @param resourceIds resource id array * @return grant result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Map grantResources(User loginUser, int userId, String resourceIds) { - Map result = new HashMap<>(5); - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - return result; - } - User user = userMapper.selectById(userId); - if (user == null) { - putMsg(result, Status.USER_NOT_EXIST, userId); - return result; - } - - Set needAuthorizeResIds = new HashSet(); - if (StringUtils.isNotBlank(resourceIds)) { - String[] resourceFullIdArr = resourceIds.split(","); - // need authorize resource id set - for (String resourceFullId : resourceFullIdArr) { - String[] resourceIdArr = resourceFullId.split("-"); - for (int i = 0; i <= resourceIdArr.length - 1; i++) { - int resourceIdValue = Integer.parseInt(resourceIdArr[i]); - needAuthorizeResIds.add(resourceIdValue); - } - } - } - - - //get the authorized resource id list by user id - List oldAuthorizedRes = resourceMapper.queryAuthorizedResourceList(userId); - //if resource type is UDF,need check whether it is bound by UDF functon - Set oldAuthorizedResIds = oldAuthorizedRes.stream().map(t -> t.getId()).collect(Collectors.toSet()); - - //get the unauthorized resource id list - oldAuthorizedResIds.removeAll(needAuthorizeResIds); - - if (CollectionUtils.isNotEmpty(oldAuthorizedResIds)) { - - // get all resource id of process definitions those is released - List> 
list = processDefinitionMapper.listResourcesByUser(userId); - Map> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list); - Set resourceIdSet = resourceProcessMap.keySet(); - - resourceIdSet.retainAll(oldAuthorizedResIds); - if (CollectionUtils.isNotEmpty(resourceIdSet)) { - logger.error("can't be deleted,because it is used of process definition"); - for (Integer resId : resourceIdSet) { - logger.error("resource id:{} is used of process definition {}", resId, resourceProcessMap.get(resId)); - } - putMsg(result, Status.RESOURCE_IS_USED); - return result; - } - - } - - resourcesUserMapper.deleteResourceUser(userId, 0); - - if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) { - return result; - } - - for (int resourceIdValue : needAuthorizeResIds) { - Resource resource = resourceMapper.selectById(resourceIdValue); - if (resource == null) { - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - Date now = new Date(); - ResourcesUser resourcesUser = new ResourcesUser(); - resourcesUser.setUserId(userId); - resourcesUser.setResourcesId(resourceIdValue); - if (resource.isDirectory()) { - resourcesUser.setPerm(Constants.AUTHORIZE_READABLE_PERM); - } else { - resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM); - } - - resourcesUser.setCreateTime(now); - resourcesUser.setUpdateTime(now); - resourcesUserMapper.insert(resourcesUser); - - } - - putMsg(result, Status.SUCCESS); - - return result; - } + Map grantResources(User loginUser, int userId, String resourceIds); /** @@ -644,43 +164,7 @@ public class UsersService extends BaseService { * @param udfIds udf id array * @return grant result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Map grantUDFFunction(User loginUser, int userId, String udfIds) { - Map result = new HashMap<>(5); - - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - return result; - } - User user = 
userMapper.selectById(userId); - if (user == null) { - putMsg(result, Status.USER_NOT_EXIST, userId); - return result; - } - - udfUserMapper.deleteByUserId(userId); - - if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) { - return result; - } - - String[] resourcesIdArr = udfIds.split(","); - - for (String udfId : resourcesIdArr) { - Date now = new Date(); - UDFUser udfUser = new UDFUser(); - udfUser.setUserId(userId); - udfUser.setUdfId(Integer.parseInt(udfId)); - udfUser.setPerm(7); - udfUser.setCreateTime(now); - udfUser.setUpdateTime(now); - udfUserMapper.insert(udfUser); - } - - putMsg(result, Status.SUCCESS); - - return result; - } + Map grantUDFFunction(User loginUser, int userId, String udfIds); /** @@ -691,45 +175,7 @@ public class UsersService extends BaseService { * @param datasourceIds data source id array * @return grant result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Map grantDataSource(User loginUser, int userId, String datasourceIds) { - Map result = new HashMap<>(5); - result.put(Constants.STATUS, false); - - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - return result; - } - User user = userMapper.selectById(userId); - if (user == null) { - putMsg(result, Status.USER_NOT_EXIST, userId); - return result; - } - - datasourceUserMapper.deleteByUserId(userId); - - if (check(result, StringUtils.isEmpty(datasourceIds), Status.SUCCESS)) { - return result; - } - - String[] datasourceIdArr = datasourceIds.split(","); - - for (String datasourceId : datasourceIdArr) { - Date now = new Date(); - - DatasourceUser datasourceUser = new DatasourceUser(); - datasourceUser.setUserId(userId); - datasourceUser.setDatasourceId(Integer.parseInt(datasourceId)); - datasourceUser.setPerm(7); - datasourceUser.setCreateTime(now); - datasourceUser.setUpdateTime(now); - datasourceUserMapper.insert(datasourceUser); - } - - putMsg(result, Status.SUCCESS); - - return result; - } + 
Map grantDataSource(User loginUser, int userId, String datasourceIds); /** * query user info @@ -737,34 +183,7 @@ public class UsersService extends BaseService { * @param loginUser login user * @return user info */ - public Map getUserInfo(User loginUser) { - - Map result = new HashMap<>(); - - User user = null; - if (loginUser.getUserType() == UserType.ADMIN_USER) { - user = loginUser; - } else { - user = userMapper.queryDetailsById(loginUser.getId()); - - List alertGroups = alertGroupMapper.queryByUserId(loginUser.getId()); - - StringBuilder sb = new StringBuilder(); - - if (alertGroups != null && alertGroups.size() > 0) { - for (int i = 0; i < alertGroups.size() - 1; i++) { - sb.append(alertGroups.get(i).getGroupName() + ","); - } - sb.append(alertGroups.get(alertGroups.size() - 1)); - user.setAlertGroup(sb.toString()); - } - } - - result.put(Constants.DATA_LIST, user); - - putMsg(result, Status.SUCCESS); - return result; - } + Map getUserInfo(User loginUser); /** * query user list @@ -772,19 +191,7 @@ public class UsersService extends BaseService { * @param loginUser login user * @return user list */ - public Map queryAllGeneralUsers(User loginUser) { - Map result = new HashMap<>(5); - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - return result; - } - - List userList = userMapper.queryAllGeneralUser(); - result.put(Constants.DATA_LIST, userList); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryAllGeneralUsers(User loginUser); /** @@ -793,19 +200,7 @@ public class UsersService extends BaseService { * @param loginUser login user * @return user list */ - public Map queryUserList(User loginUser) { - Map result = new HashMap<>(5); - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - return result; - } - - List userList = userMapper.selectList(null); - result.put(Constants.DATA_LIST, userList); - putMsg(result, Status.SUCCESS); - - return 
result; - } + Map queryUserList(User loginUser); /** * verify user name exists @@ -813,20 +208,7 @@ public class UsersService extends BaseService { * @param userName user name * @return true if user name not exists, otherwise return false */ - public Result verifyUserName(String userName) { - - Result result = new Result(); - User user = userMapper.queryByUserNameAccurately(userName); - if (user != null) { - logger.error("user {} has exist, can't create again.", userName); - - putMsg(result, Status.USER_NAME_EXIST); - } else { - putMsg(result, Status.SUCCESS); - } - - return result; - } + Result verifyUserName(String userName); /** @@ -836,34 +218,7 @@ public class UsersService extends BaseService { * @param alertgroupId alert group id * @return unauthorize result code */ - public Map unauthorizedUser(User loginUser, Integer alertgroupId) { - - Map result = new HashMap<>(5); - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - return result; - } - - List userList = userMapper.selectList(null); - List resultUsers = new ArrayList<>(); - Set userSet = null; - if (userList != null && userList.size() > 0) { - userSet = new HashSet<>(userList); - - List authedUserList = userMapper.queryUserListByAlertGroupId(alertgroupId); - - Set authedUserSet = null; - if (authedUserList != null && authedUserList.size() > 0) { - authedUserSet = new HashSet<>(authedUserList); - userSet.removeAll(authedUserSet); - } - resultUsers = new ArrayList<>(userSet); - } - result.put(Constants.DATA_LIST, resultUsers); - putMsg(result, Status.SUCCESS); - - return result; - } + Map unauthorizedUser(User loginUser, Integer alertgroupId); /** @@ -873,86 +228,7 @@ public class UsersService extends BaseService { * @param alertgroupId alert group id * @return authorized result code */ - public Map authorizedUser(User loginUser, Integer alertgroupId) { - Map result = new HashMap<>(5); - //only admin can operate - if (check(result, !isAdmin(loginUser), 
Status.USER_NO_OPERATION_PERM)) { - return result; - } - List userList = userMapper.queryUserListByAlertGroupId(alertgroupId); - result.put(Constants.DATA_LIST, userList); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * @param tenantId tenant id - * @return true if tenant exists, otherwise return false - */ - private boolean checkTenantExists(int tenantId) { - return tenantMapper.queryById(tenantId) != null ? true : false; - } - - /** - * @return if check failed return the field, otherwise return null - */ - private String checkUserParams(String userName, String password, String email, String phone) { - - String msg = null; - if (!CheckUtils.checkUserName(userName)) { - - msg = userName; - } else if (!CheckUtils.checkPassword(password)) { - - msg = password; - } else if (!CheckUtils.checkEmail(email)) { - - msg = email; - } else if (!CheckUtils.checkPhone(phone)) { - - msg = phone; - } - - return msg; - } - - /** - * copy resource files - * - * @param resourceComponent resource component - * @param srcBasePath src base path - * @param dstBasePath dst base path - * @throws IOException io exception - */ - private void copyResourceFiles(ResourceComponent resourceComponent, String srcBasePath, String dstBasePath) throws IOException { - List components = resourceComponent.getChildren(); - - if (CollectionUtils.isNotEmpty(components)) { - for (ResourceComponent component : components) { - // verify whether exist - if (!HadoopUtils.getInstance().exists(String.format("%s/%s", srcBasePath, component.getFullName()))) { - logger.error("resource file: {} not exist,copy error", component.getFullName()); - throw new ServiceException(Status.RESOURCE_NOT_EXIST); - } - - if (!component.isDirctory()) { - // copy it to dst - HadoopUtils.getInstance().copy(String.format("%s/%s", srcBasePath, component.getFullName()), String.format("%s/%s", dstBasePath, component.getFullName()), false, true); - continue; - } - - if (CollectionUtils.isEmpty(component.getChildren())) 
{ - // if not exist,need create it - if (!HadoopUtils.getInstance().exists(String.format("%s/%s", dstBasePath, component.getFullName()))) { - HadoopUtils.getInstance().mkdir(String.format("%s/%s", dstBasePath, component.getFullName())); - } - } else { - copyResourceFiles(component, srcBasePath, dstBasePath); - } - } - } - } + Map authorizedUser(User loginUser, Integer alertgroupId); /** * register user, default state is 0, default tenant_id is 1, no phone, no queue @@ -964,27 +240,7 @@ public class UsersService extends BaseService { * @return register result code * @throws Exception exception */ - @Transactional(rollbackFor = RuntimeException.class) - public Map registerUser(String userName, String userPassword, String repeatPassword, String email) { - Map result = new HashMap<>(); - - //check user params - String msg = this.checkUserParams(userName, userPassword, email, ""); - - if (!StringUtils.isEmpty(msg)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, msg); - return result; - } - - if (!userPassword.equals(repeatPassword)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "two passwords are not same"); - return result; - } - User user = createUser(userName, userPassword, email, 1, "", "", Flag.NO.ordinal()); - putMsg(result, Status.SUCCESS); - result.put(Constants.DATA_LIST, user); - return result; - } + Map registerUser(String userName, String userPassword, String repeatPassword, String email); /** * activate user, only system admin have permission, change user state code 0 to 1 @@ -993,41 +249,7 @@ public class UsersService extends BaseService { * @param userName user name * @return create result code */ - public Map activateUser(User loginUser, String userName) { - Map result = new HashMap<>(); - result.put(Constants.STATUS, false); - - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - if (!CheckUtils.checkUserName(userName)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, 
userName); - return result; - } - - User user = userMapper.queryByUserNameAccurately(userName); - - if (user == null) { - putMsg(result, Status.USER_NOT_EXIST, userName); - return result; - } - - if (user.getState() != Flag.NO.ordinal()) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); - return result; - } - - user.setState(Flag.YES.ordinal()); - Date now = new Date(); - user.setUpdateTime(now); - userMapper.updateById(user); - User responseUser = userMapper.queryByUserNameAccurately(userName); - putMsg(result, Status.SUCCESS); - result.put(Constants.DATA_LIST, responseUser); - return result; - } + Map activateUser(User loginUser, String userName); /** * activate user, only system admin have permission, change users state code 0 to 1 @@ -1036,44 +258,5 @@ public class UsersService extends BaseService { * @param userNames user name * @return create result code */ - public Map batchActivateUser(User loginUser, List userNames) { - Map result = new HashMap<>(); - - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - int totalSuccess = 0; - List successUserNames = new ArrayList<>(); - Map successRes = new HashMap<>(); - int totalFailed = 0; - List> failedInfo = new ArrayList<>(); - Map failedRes = new HashMap<>(); - for (String userName : userNames) { - Map tmpResult = activateUser(loginUser, userName); - if (tmpResult.get(Constants.STATUS) != Status.SUCCESS) { - totalFailed++; - Map failedBody = new HashMap<>(); - failedBody.put("userName", userName); - Status status = (Status) tmpResult.get(Constants.STATUS); - String errorMessage = MessageFormat.format(status.getMsg(), userName); - failedBody.put("msg", errorMessage); - failedInfo.add(failedBody); - } else { - totalSuccess++; - successUserNames.add(userName); - } - } - successRes.put("sum", totalSuccess); - successRes.put("userName", successUserNames); - failedRes.put("sum", totalFailed); - failedRes.put("info", failedInfo); - Map res = new 
HashMap<>(); - res.put("success", successRes); - res.put("failed", failedRes); - putMsg(result, Status.SUCCESS); - result.put(Constants.DATA_LIST, res); - return result; - } + Map batchActivateUser(User loginUser, List userNames); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageService.java index 29fd99c0f7..360e813c68 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageService.java @@ -14,84 +14,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.api.service; - -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper; -import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage; -import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.util.*; -@Service -public class WorkFlowLineageService extends BaseService { - - @Autowired - private WorkFlowLineageMapper workFlowLineageMapper; +package org.apache.dolphinscheduler.api.service; - public Map queryWorkFlowLineageByName(String workFlowName, int projectId) { - Map result = new HashMap<>(5); - List workFlowLineageList = workFlowLineageMapper.queryByName(workFlowName, projectId); - result.put(Constants.DATA_LIST, workFlowLineageList); - putMsg(result, Status.SUCCESS); - return result; - } +import java.util.Map; +import java.util.Set; - private List getWorkFlowRelationRecursion(Set ids, List workFlowRelations,Set sourceIds) { - for(int id : ids) { - 
sourceIds.addAll(ids); - List workFlowRelationsTmp = workFlowLineageMapper.querySourceTarget(id); - if(workFlowRelationsTmp != null && !workFlowRelationsTmp.isEmpty()) { - Set idsTmp = new HashSet<>(); - for(WorkFlowRelation workFlowRelation:workFlowRelationsTmp) { - if(!sourceIds.contains(workFlowRelation.getTargetWorkFlowId())){ - idsTmp.add(workFlowRelation.getTargetWorkFlowId()); - } - } - workFlowRelations.addAll(workFlowRelationsTmp); - getWorkFlowRelationRecursion(idsTmp, workFlowRelations,sourceIds); - } - } - return workFlowRelations; - } +/** + * work flow lineage service + */ +public interface WorkFlowLineageService { - public Map queryWorkFlowLineageByIds(Set ids,int projectId) { - Map result = new HashMap<>(5); - List workFlowLineageList = workFlowLineageMapper.queryByIds(ids, projectId); - Map workFlowLists = new HashMap<>(5); - Set idsV = new HashSet<>(); - if(ids == null || ids.isEmpty()){ - for(WorkFlowLineage workFlowLineage:workFlowLineageList) { - idsV.add(workFlowLineage.getWorkFlowId()); - } - } else { - idsV = ids; - } - List workFlowRelations = new ArrayList<>(); - Set sourceIds = new HashSet<>(); - getWorkFlowRelationRecursion(idsV, workFlowRelations, sourceIds); + Map queryWorkFlowLineageByName(String workFlowName, int projectId); - Set idSet = new HashSet<>(); - //If the incoming parameter is not empty, you need to add downstream workflow detail attributes - if(ids != null && !ids.isEmpty()) { - for(WorkFlowRelation workFlowRelation : workFlowRelations) { - idSet.add(workFlowRelation.getTargetWorkFlowId()); - } - for(int id : ids){ - idSet.remove(id); - } - if(!idSet.isEmpty()) { - workFlowLineageList.addAll(workFlowLineageMapper.queryByIds(idSet, projectId)); - } - } + Map queryWorkFlowLineageByIds(Set ids,int projectId); - workFlowLists.put("workFlowList",workFlowLineageList); - workFlowLists.put("workFlowRelationList",workFlowRelations); - result.put(Constants.DATA_LIST, workFlowLists); - putMsg(result, Status.SUCCESS); - return 
result; - } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java index 1c634a9cd2..2f1ea2c857 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java @@ -17,41 +17,14 @@ package org.apache.dolphinscheduler.api.service; -import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; - -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.entity.WorkerGroup; -import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; -import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - /** - * work group service + * worker group service */ -@Service -public class WorkerGroupService extends BaseService { - - private static final String NO_NODE_EXCEPTION_REGEX = "KeeperException$NoNodeException"; - @Autowired - protected ZookeeperCachedOperator zookeeperCachedOperator; - @Autowired - ProcessInstanceMapper processInstanceMapper; +public interface WorkerGroupService { /** * query worker group paging @@ -62,118 +35,13 @@ public class WorkerGroupService extends 
BaseService { * @param pageSize page size * @return worker group list page */ - public Map queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal) { - - // list from index - Integer fromIndex = (pageNo - 1) * pageSize; - // list to index - Integer toIndex = (pageNo - 1) * pageSize + pageSize; - - Map result = new HashMap<>(); - if (isNotAdmin(loginUser, result)) { - return result; - } - - List workerGroups = getWorkerGroups(true); - - List resultDataList = new ArrayList<>(); - - if (CollectionUtils.isNotEmpty(workerGroups)) { - List searchValDataList = new ArrayList<>(); - - if (StringUtils.isNotEmpty(searchVal)) { - for (WorkerGroup workerGroup : workerGroups) { - if (workerGroup.getName().contains(searchVal)) { - searchValDataList.add(workerGroup); - } - } - } else { - searchValDataList = workerGroups; - } - - if (searchValDataList.size() < pageSize) { - toIndex = (pageNo - 1) * pageSize + searchValDataList.size(); - } - resultDataList = searchValDataList.subList(fromIndex, toIndex); - } - - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotalCount(resultDataList.size()); - pageInfo.setLists(resultDataList); - - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - return result; - } + Map queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal); /** * query all worker group * * @return all worker group list */ - public Map queryAllGroup() { - Map result = new HashMap<>(); - - List workerGroups = getWorkerGroups(false); - - Set availableWorkerGroupSet = workerGroups.stream() - .map(workerGroup -> workerGroup.getName()) - .collect(Collectors.toSet()); - result.put(Constants.DATA_LIST, availableWorkerGroupSet); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * get worker groups - * - * @param isPaging whether paging - * @return WorkerGroup list - */ - private List getWorkerGroups(boolean isPaging) { - - String workerPath = 
zookeeperCachedOperator.getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS; - List workerGroups = new ArrayList<>(); - List workerGroupList; - try { - workerGroupList = zookeeperCachedOperator.getChildrenKeys(workerPath); - } catch (Exception e) { - if (e.getMessage().contains(NO_NODE_EXCEPTION_REGEX)) { - if (isPaging) { - return workerGroups; - } else { - //ignore noNodeException return Default - WorkerGroup wg = new WorkerGroup(); - wg.setName(DEFAULT_WORKER_GROUP); - workerGroups.add(wg); - return workerGroups; - } - } else { - throw e; - } - } + Map queryAllGroup(); - for (String workerGroup : workerGroupList) { - String workerGroupPath = workerPath + "/" + workerGroup; - List childrenNodes = zookeeperCachedOperator.getChildrenKeys(workerGroupPath); - String timeStamp = ""; - for (int i = 0; i < childrenNodes.size(); i++) { - String ip = childrenNodes.get(i); - childrenNodes.set(i, ip.substring(0, ip.lastIndexOf(":"))); - timeStamp = ip.substring(ip.lastIndexOf(":")); - } - if (CollectionUtils.isNotEmpty(childrenNodes)) { - WorkerGroup wg = new WorkerGroup(); - wg.setName(workerGroup); - if (isPaging) { - wg.setIpList(childrenNodes); - String registeredIpValue = zookeeperCachedOperator.get(workerGroupPath + "/" + childrenNodes.get(0) + timeStamp); - wg.setCreateTime(DateUtils.stringToDate(registeredIpValue.split(",")[6])); - wg.setUpdateTime(DateUtils.stringToDate(registeredIpValue.split(",")[7])); - } - workerGroups.add(wg); - } - } - return workerGroups; - } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java index da85621041..2864e9cda9 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java @@ -14,11 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.AccessTokenService; -import org.apache.dolphinscheduler.api.service.BaseService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; @@ -44,7 +44,7 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * access token service impl */ @Service -public class AccessTokenServiceImpl extends BaseService implements AccessTokenService { +public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTokenService { private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceImpl.class); @@ -60,8 +60,9 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe * @param pageSize page size * @return token list for page number and page size */ + @Override public Map queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); Page page = new Page<>(pageNo, pageSize); @@ -86,8 +87,9 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe * @param token token string * @return create result code */ + @Override public Map createToken(User loginUser, int userId, String expireTime, String token) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (!hasPerm(loginUser,userId)){ putMsg(result, Status.USER_NO_OPERATION_PERM); @@ -123,8 +125,9 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe * 
@param expireTime token expire time * @return token string */ + @Override public Map generateToken(User loginUser, int userId, String expireTime) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (!hasPerm(loginUser,userId)){ putMsg(result, Status.USER_NO_OPERATION_PERM); return result; @@ -142,8 +145,9 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe * @param id token id * @return delete result code */ + @Override public Map delAccessTokenById(User loginUser, int id) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); AccessToken accessToken = accessTokenMapper.selectById(id); @@ -173,8 +177,9 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe * @param token token string * @return update result code */ + @Override public Map updateToken(User loginUser, int id, int userId, String expireTime, String token) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (!hasPerm(loginUser,userId)){ putMsg(result, Status.USER_NO_OPERATION_PERM); return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java new file mode 100644 index 0000000000..90df9a09a9 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java @@ -0,0 +1,213 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.AlertGroupService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.AlertGroup; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; + +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * alert group service impl + */ +@Service +public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroupService { + + @Autowired + private AlertGroupMapper alertGroupMapper; + + /** + * query alert group list + * + * @return alert group list + */ + @Override + public Map queryAlertgroup() { + + HashMap result = new HashMap<>(); + List alertGroups = alertGroupMapper.queryAllGroupList(); + result.put(Constants.DATA_LIST, alertGroups); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * paging query alarm group list + * + * 
@param loginUser login user + * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @return alert group list page + */ + @Override + public Map listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { + + Map result = new HashMap<>(); + if (isNotAdmin(loginUser, result)) { + return result; + } + + Page page = new Page<>(pageNo, pageSize); + IPage alertGroupIPage = alertGroupMapper.queryAlertGroupPage( + page, searchVal); + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + pageInfo.setTotalCount((int) alertGroupIPage.getTotal()); + pageInfo.setLists(alertGroupIPage.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * create alert group + * + * @param loginUser login user + * @param groupName group name + * @param desc description + * @param alertInstanceIds alertInstanceIds + * @return create result code + */ + @Override + public Map createAlertgroup(User loginUser, String groupName, String desc, String alertInstanceIds) { + Map result = new HashMap<>(); + //only admin can operate + if (isNotAdmin(loginUser, result)) { + return result; + } + + AlertGroup alertGroup = new AlertGroup(); + Date now = new Date(); + + alertGroup.setGroupName(groupName); + alertGroup.setAlertInstanceIds(alertInstanceIds); + alertGroup.setDescription(desc); + alertGroup.setCreateTime(now); + alertGroup.setUpdateTime(now); + alertGroup.setCreateUserId(loginUser.getId()); + + // insert + int insert = alertGroupMapper.insert(alertGroup); + + if (insert > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.CREATE_ALERT_GROUP_ERROR); + } + return result; + } + + /** + * update alert group + * + * @param loginUser login user + * @param id alert group id + * @param groupName group name + * @param desc description + * @param alertInstanceIds alertInstanceIds + * @return update result code + */ + @Override + public
Map updateAlertgroup(User loginUser, int id, String groupName, String desc, String alertInstanceIds) { + Map result = new HashMap<>(); + + if (isNotAdmin(loginUser, result)) { + return result; + } + + AlertGroup alertGroup = alertGroupMapper.selectById(id); + + if (alertGroup == null) { + putMsg(result, Status.ALERT_GROUP_NOT_EXIST); + return result; + + } + + Date now = new Date(); + + if (StringUtils.isNotEmpty(groupName)) { + alertGroup.setGroupName(groupName); + } + alertGroup.setDescription(desc); + alertGroup.setUpdateTime(now); + alertGroup.setCreateUserId(loginUser.getId()); + alertGroup.setAlertInstanceIds(alertInstanceIds); + alertGroupMapper.updateById(alertGroup); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * delete alert group by id + * + * @param loginUser login user + * @param id alert group id + * @return delete result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map delAlertgroupById(User loginUser, int id) { + Map result = new HashMap<>(); + result.put(Constants.STATUS, false); + + //only admin can operate + if (isNotAdmin(loginUser, result)) { + return result; + } + //check exist + AlertGroup alertGroup = alertGroupMapper.selectById(id); + if (alertGroup == null) { + putMsg(result, Status.ALERT_GROUP_NOT_EXIST); + return result; + } + alertGroupMapper.deleteById(id); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * verify group name exists + * + * @param groupName group name + * @return check result code + */ + @Override + public boolean existGroupName(String groupName) { + List alertGroup = alertGroupMapper.queryByGroupName(groupName); + return CollectionUtils.isNotEmpty(alertGroup); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java index 4d3d35a76e..95171edf06 
100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java @@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.AlertPluginInstanceService; -import org.apache.dolphinscheduler.api.service.BaseService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.vo.AlertPluginInstanceVO; import org.apache.dolphinscheduler.common.Constants; @@ -32,9 +31,6 @@ import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.PluginDefineMapper; import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import org.apache.commons.collections4.MapUtils; import java.util.ArrayList; import java.util.Arrays; @@ -57,7 +53,7 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; */ @Service @Lazy -public class AlertPluginInstanceServiceImpl extends BaseService implements AlertPluginInstanceService { +public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements AlertPluginInstanceService { @Autowired private AlertPluginInstanceMapper alertPluginInstanceMapper; @@ -250,26 +246,15 @@ public class AlertPluginInstanceServiceImpl extends BaseService implements Alert } /** - * parseToPluginUiParams + * parse To Plugin Ui Params * * @param pluginParamsMapString k-v data * @param pluginUiParams Complete parameters(include ui) * @return Complete parameters list(include ui) */ private String parseToPluginUiParams(String pluginParamsMapString, String pluginUiParams) { - Map paramsMap = JSONUtils.toMap(pluginParamsMapString); - 
if (MapUtils.isEmpty(paramsMap)) { - return null; - } - List pluginParamsList = JSONUtils.toList(pluginUiParams, PluginParams.class); - List newPluginParamsList = new ArrayList<>(pluginParamsList.size()); - pluginParamsList.forEach(pluginParams -> { - pluginParams.setValue(paramsMap.get(pluginParams.getName())); - newPluginParamsList.add(pluginParams); - - }); - - return JSONUtils.toJsonString(newPluginParamsList); + List> pluginParamsList = PluginParamsTransfer.generatePluginParams(pluginParamsMapString, pluginUiParams); + return JSONUtils.toJsonString(pluginParamsList); } private boolean checkHasAssociatedAlertGroup(String id) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java new file mode 100644 index 0000000000..6b581dbb51 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java @@ -0,0 +1,143 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.BaseService; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; +import org.apache.dolphinscheduler.dao.entity.User; + +import java.io.IOException; +import java.text.MessageFormat; +import java.util.Map; + +/** + * base service impl + */ +public class BaseServiceImpl implements BaseService { + + /** + * check admin + * + * @param user input user + * @return true if administrator, otherwise return false + */ + @Override + public boolean isAdmin(User user) { + return user.getUserType() == UserType.ADMIN_USER; + } + + /** + * isNotAdmin + * + * @param loginUser login user + * @param result result code + * @return true if not administrator, otherwise false + */ + @Override + public boolean isNotAdmin(User loginUser, Map result) { + //only admin can operate + if (!isAdmin(loginUser)) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return true; + } + return false; + } + + /** + * put message to map + * + * @param result result code + * @param status status + * @param statusParams status message + */ + @Override + public void putMsg(Map result, Status status, Object... statusParams) { + result.put(Constants.STATUS, status); + if (statusParams != null && statusParams.length > 0) { + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.put(Constants.MSG, status.getMsg()); + } + } + + /** + * put message to result object + * + * @param result result code + * @param status status + * @param statusParams status message + */ + @Override + public void putMsg(Result result, Status status, Object...
statusParams) { + result.setCode(status.getCode()); + if (statusParams != null && statusParams.length > 0) { + result.setMsg(MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.setMsg(status.getMsg()); + } + } + + /** + * check + * + * @param result result + * @param bool bool + * @param userNoOperationPerm status + * @return check result + */ + @Override + public boolean check(Map result, boolean bool, Status userNoOperationPerm) { + // only admin can operate + if (bool) { + result.put(Constants.STATUS, userNoOperationPerm); + result.put(Constants.MSG, userNoOperationPerm.getMsg()); + return true; + } + return false; + } + + /** + * create tenant dir if not exists + * + * @param tenantCode tenant code + * @throws IOException if hdfs operation exception + */ + @Override + public void createTenantDirIfNotExists(String tenantCode) throws IOException { + String resourcePath = HadoopUtils.getHdfsResDir(tenantCode); + String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); + // init resource path and udf path + HadoopUtils.getInstance().mkdir(resourcePath); + HadoopUtils.getInstance().mkdir(udfsPath); + } + + /** + * has perm + * + * @param operateUser operate user + * @param createUserId create user id + */ + @Override + public boolean hasPerm(User operateUser, int createUserId) { + return operateUser.getId() == createUserId || isAdmin(operateUser); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java index 7254fc1a88..aaa4cd31fb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java @@ -14,13 +14,13 @@ * See the License for the specific language governing permissions and * 
limitations under the License. */ + package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.dto.CommandStateCount; import org.apache.dolphinscheduler.api.dto.DefineUserDto; import org.apache.dolphinscheduler.api.dto.TaskCountDto; import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.service.BaseService; import org.apache.dolphinscheduler.api.service.DataAnalysisService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.common.Constants; @@ -60,7 +60,7 @@ import org.springframework.stereotype.Service; * data analysis service impl */ @Service -public class DataAnalysisServiceImpl extends BaseService implements DataAnalysisService { +public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnalysisService { @Autowired private ProjectMapper projectMapper; @@ -95,6 +95,7 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis * @param endDate end date * @return task state count data */ + @Override public Map countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) { return countStateByProject( @@ -114,6 +115,7 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis * @param endDate end date * @return process instance state count data */ + @Override public Map countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) { Map result = this.countStateByProject( loginUser, @@ -130,7 +132,7 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis private Map countStateByProject(User loginUser, int projectId, String startDate, String endDate , TriFunction> instanceStateCounter) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); boolean checkProject = checkProject(loginUser, projectId, result); if (!checkProject) { return result; @@ -167,6 +169,7 @@ public class 
DataAnalysisServiceImpl extends BaseService implements DataAnalysis * @param projectId project id * @return definition count data */ + @Override public Map countDefinitionByUser(User loginUser, int projectId) { Map result = new HashMap<>(); @@ -191,9 +194,10 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis * @param endDate end date * @return command state count data */ + @Override public Map countCommandState(User loginUser, int projectId, String startDate, String endDate) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); boolean checkProject = checkProject(loginUser, projectId, result); if (!checkProject) { return result; @@ -263,8 +267,9 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis * @param projectId project id * @return queue state count data */ + @Override public Map countQueueState(User loginUser, int projectId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); boolean checkProject = checkProject(loginUser, projectId, result); if (!checkProject) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java new file mode 100644 index 0000000000..48e73e22f9 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java @@ -0,0 +1,671 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.DataSourceService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.DbConnectType; +import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.utils.CommonUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; +import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; +import org.apache.dolphinscheduler.dao.datasource.OracleDataSource; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; +import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; + +import java.sql.Connection; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import 
org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.fasterxml.jackson.databind.node.ObjectNode; + +/** + * data source service impl + */ +@Service +public class DataSourceServiceImpl extends BaseServiceImpl implements DataSourceService { + + private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceImpl.class); + + public static final String NAME = "name"; + public static final String NOTE = "note"; + public static final String TYPE = "type"; + public static final String HOST = "host"; + public static final String PORT = "port"; + public static final String PRINCIPAL = "principal"; + public static final String DATABASE = "database"; + public static final String USER_NAME = "userName"; + public static final String OTHER = "other"; + + @Autowired + private DataSourceMapper dataSourceMapper; + + @Autowired + private DataSourceUserMapper datasourceUserMapper; + + /** + * create data source + * + * @param loginUser login user + * @param name data source name + * @param desc data source description + * @param type data source type + * @param parameter datasource parameters + * @return create result code + */ + @Override + public Result createDataSource(User loginUser, String name, String desc, DbType type, String parameter) { + + Result result = new Result<>(); + // check name can use or not + if (checkName(name)) { + putMsg(result, Status.DATASOURCE_EXIST); + return result; + } + Result isConnection = checkConnection(type, parameter); + if (Status.SUCCESS.getCode() != isConnection.getCode()) { + return result; + } + + // build datasource + DataSource dataSource = new DataSource(); + Date now = new Date(); + + dataSource.setName(name.trim()); + dataSource.setNote(desc); + dataSource.setUserId(loginUser.getId()); + dataSource.setUserName(loginUser.getUserName()); + dataSource.setType(type); + 
dataSource.setConnectionParams(parameter); + dataSource.setCreateTime(now); + dataSource.setUpdateTime(now); + dataSourceMapper.insert(dataSource); + + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * update datasource + * + * @param loginUser login user + * @param name data source name + * @param desc data source description + * @param type data source type + * @param parameter datasource parameters + * @param id data source id + * @return update result code + */ + @Override + public Result updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) { + + Result result = new Result<>(); + // determine whether the data source exists + DataSource dataSource = dataSourceMapper.selectById(id); + if (dataSource == null) { + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + + if (!hasPerm(loginUser, dataSource.getUserId())) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + //check name can use or not + if (!name.trim().equals(dataSource.getName()) && checkName(name)) { + putMsg(result, Status.DATASOURCE_EXIST); + return result; + } + //check password, if the password is not updated, set to the old password.
+ ObjectNode paramObject = JSONUtils.parseObject(parameter); + String password = paramObject.path(Constants.PASSWORD).asText(); + if (StringUtils.isBlank(password)) { + String oldConnectionParams = dataSource.getConnectionParams(); + ObjectNode oldParams = JSONUtils.parseObject(oldConnectionParams); + paramObject.put(Constants.PASSWORD, oldParams.path(Constants.PASSWORD).asText()); + } + // connectionParams json + String connectionParams = paramObject.toString(); + + Result isConnection = checkConnection(type, parameter); + if (Status.SUCCESS.getCode() != isConnection.getCode()) { + return result; + } + + Date now = new Date(); + + dataSource.setName(name.trim()); + dataSource.setNote(desc); + dataSource.setUserName(loginUser.getUserName()); + dataSource.setType(type); + dataSource.setConnectionParams(connectionParams); + dataSource.setUpdateTime(now); + dataSourceMapper.updateById(dataSource); + putMsg(result, Status.SUCCESS); + return result; + } + + private boolean checkName(String name) { + List queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim()); + return queryDataSource != null && !queryDataSource.isEmpty(); + } + + /** + * query datasource by id + * + * @param id datasource id + * @return data source detail + */ + @Override + public Map queryDataSource(int id) { + + Map result = new HashMap<>(); + DataSource dataSource = dataSourceMapper.selectById(id); + if (dataSource == null) { + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + // type + String dataSourceType = dataSource.getType().toString(); + // name + String dataSourceName = dataSource.getName(); + // desc + String desc = dataSource.getNote(); + // parameter + String parameter = dataSource.getConnectionParams(); + + BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter); + DbConnectType connectType = null; + String hostSeperator = Constants.DOUBLE_SLASH; + if (DbType.ORACLE.equals(dataSource.getType())) {
connectType = ((OracleDataSource) datasourceForm).getConnectType(); + if (DbConnectType.ORACLE_SID.equals(connectType)) { + hostSeperator = Constants.AT_SIGN; + } + } + String database = datasourceForm.getDatabase(); + // jdbc connection params + String other = datasourceForm.getOther(); + String address = datasourceForm.getAddress(); + + String[] hostsPorts = getHostsAndPort(address, hostSeperator); + // ip host + String host = hostsPorts[0]; + // prot + String port = hostsPorts[1]; + String separator = ""; + + switch (dataSource.getType()) { + case HIVE: + case SQLSERVER: + separator = ";"; + break; + case MYSQL: + case POSTGRESQL: + case CLICKHOUSE: + case ORACLE: + case PRESTO: + separator = "&"; + break; + default: + separator = "&"; + break; + } + + Map otherMap = new LinkedHashMap<>(); + if (other != null) { + String[] configs = other.split(separator); + for (String config : configs) { + otherMap.put(config.split("=")[0], config.split("=")[1]); + } + + } + + Map map = new HashMap<>(); + map.put(NAME, dataSourceName); + map.put(NOTE, desc); + map.put(TYPE, dataSourceType); + if (connectType != null) { + map.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType); + } + + map.put(HOST, host); + map.put(PORT, port); + map.put(PRINCIPAL, datasourceForm.getPrincipal()); + map.put(Constants.KERBEROS_KRB5_CONF_PATH, datasourceForm.getJavaSecurityKrb5Conf()); + map.put(Constants.KERBEROS_KEY_TAB_USERNAME, datasourceForm.getLoginUserKeytabUsername()); + map.put(Constants.KERBEROS_KEY_TAB_PATH, datasourceForm.getLoginUserKeytabPath()); + map.put(DATABASE, database); + map.put(USER_NAME, datasourceForm.getUser()); + map.put(OTHER, otherMap); + result.put(Constants.DATA_LIST, map); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * query datasource list by keyword + * + * @param loginUser login user + * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @return data source list page + */ + @Override + public Map 
queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(); + IPage dataSourceList; + Page dataSourcePage = new Page<>(pageNo, pageSize); + + if (isAdmin(loginUser)) { + dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal); + } else { + dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal); + } + + List dataSources = dataSourceList != null ? dataSourceList.getRecords() : new ArrayList<>(); + handlePasswd(dataSources); + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + pageInfo.setTotalCount((int) (dataSourceList != null ? dataSourceList.getTotal() : 0L)); + pageInfo.setLists(dataSources); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * handle datasource connection password for safety + * + * @param dataSourceList + */ + private void handlePasswd(List dataSourceList) { + for (DataSource dataSource : dataSourceList) { + String connectionParams = dataSource.getConnectionParams(); + ObjectNode object = JSONUtils.parseObject(connectionParams); + object.put(Constants.PASSWORD, getHiddenPassword()); + dataSource.setConnectionParams(object.toString()); + } + } + + /** + * get hidden password (resolve the security hotspot) + * + * @return hidden password + */ + private String getHiddenPassword() { + return Constants.XXXXXX; + } + + /** + * query data resource list + * + * @param loginUser login user + * @param type data source type + * @return data source list page + */ + @Override + public Map queryDataSourceList(User loginUser, Integer type) { + Map result = new HashMap<>(); + + List datasourceList; + + if (isAdmin(loginUser)) { + datasourceList = dataSourceMapper.listAllDataSourceByType(type); + } else { + datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type); + } + + result.put(Constants.DATA_LIST, datasourceList); + putMsg(result, 
Status.SUCCESS); + + return result; + } + + /** + * verify datasource exists + * + * @param name datasource name + * @return true if data datasource not exists, otherwise return false + */ + @Override + public Result verifyDataSourceName(String name) { + Result result = new Result<>(); + List dataSourceList = dataSourceMapper.queryDataSourceByName(name); + if (dataSourceList != null && !dataSourceList.isEmpty()) { + putMsg(result, Status.DATASOURCE_EXIST); + } else { + putMsg(result, Status.SUCCESS); + } + + return result; + } + + /** + * check connection + * + * @param type data source type + * @param parameter data source parameters + * @return true if connect successfully, otherwise false + */ + @Override + public Result checkConnection(DbType type, String parameter) { + Result result = new Result<>(); + BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter); + if (datasource == null) { + putMsg(result, Status.DATASOURCE_TYPE_NOT_EXIST, type); + return result; + } + try (Connection connection = datasource.getConnection()) { + if (connection == null) { + putMsg(result, Status.CONNECTION_TEST_FAILURE); + return result; + } + putMsg(result, Status.SUCCESS); + return result; + } catch (Exception e) { + logger.error("datasource test connection error, dbType:{}, jdbcUrl:{}, message:{}.", type, datasource.getJdbcUrl(), e.getMessage()); + return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(),e.getMessage()); + } + } + + /** + * test connection + * + * @param id datasource id + * @return connect result code + */ + @Override + public Result connectionTest(int id) { + DataSource dataSource = dataSourceMapper.selectById(id); + if (dataSource == null) { + Result result = new Result<>(); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + return checkConnection(dataSource.getType(), dataSource.getConnectionParams()); + } + + /** + * build paramters + * + * @param type data source type + * @param host data source host + * @param 
port data source port + * @param database data source database name + * @param userName user name + * @param password password + * @param other other parameters + * @param principal principal + * @return datasource parameter + */ + @Override + public String buildParameter(DbType type, String host, + String port, String database, String principal, String userName, + String password, DbConnectType connectType, String other, + String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) { + + String address = buildAddress(type, host, port, connectType); + Map parameterMap = new LinkedHashMap<>(); + String jdbcUrl; + if (DbType.SQLSERVER == type) { + jdbcUrl = address + ";databaseName=" + database; + } else { + jdbcUrl = address + "/" + database; + } + + if (Constants.ORACLE.equals(type.name())) { + parameterMap.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType); + } + + if (CommonUtils.getKerberosStartupState() + && (type == DbType.HIVE || type == DbType.SPARK)) { + jdbcUrl += ";principal=" + principal; + } + + String separator = ""; + if (Constants.MYSQL.equals(type.name()) + || Constants.POSTGRESQL.equals(type.name()) + || Constants.CLICKHOUSE.equals(type.name()) + || Constants.ORACLE.equals(type.name()) + || Constants.PRESTO.equals(type.name())) { + separator = "&"; + } else if (Constants.HIVE.equals(type.name()) + || Constants.SPARK.equals(type.name()) + || Constants.DB2.equals(type.name()) + || Constants.SQLSERVER.equals(type.name())) { + separator = ";"; + } + + parameterMap.put(TYPE, connectType); + parameterMap.put(Constants.ADDRESS, address); + parameterMap.put(Constants.DATABASE, database); + parameterMap.put(Constants.JDBC_URL, jdbcUrl); + parameterMap.put(Constants.USER, userName); + parameterMap.put(Constants.PASSWORD, CommonUtils.encodePassword(password)); + if (CommonUtils.getKerberosStartupState() + && (type == DbType.HIVE || type == DbType.SPARK)) { + parameterMap.put(Constants.PRINCIPAL, principal); + 
parameterMap.put(Constants.KERBEROS_KRB5_CONF_PATH, javaSecurityKrb5Conf); + parameterMap.put(Constants.KERBEROS_KEY_TAB_USERNAME, loginUserKeytabUsername); + parameterMap.put(Constants.KERBEROS_KEY_TAB_PATH, loginUserKeytabPath); + } + + Map map = JSONUtils.toMap(other); + if (map != null) { + StringBuilder otherSb = new StringBuilder(); + for (Map.Entry entry: map.entrySet()) { + otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator)); + } + if (!Constants.DB2.equals(type.name())) { + otherSb.deleteCharAt(otherSb.length() - 1); + } + parameterMap.put(Constants.OTHER, otherSb); + } + + if (logger.isDebugEnabled()) { + logger.info("parameters map:{}", JSONUtils.toJsonString(parameterMap)); + } + return JSONUtils.toJsonString(parameterMap); + + } + + private String buildAddress(DbType type, String host, String port, DbConnectType connectType) { + StringBuilder sb = new StringBuilder(); + if (Constants.MYSQL.equals(type.name())) { + sb.append(Constants.JDBC_MYSQL); + sb.append(host).append(":").append(port); + } else if (Constants.POSTGRESQL.equals(type.name())) { + sb.append(Constants.JDBC_POSTGRESQL); + sb.append(host).append(":").append(port); + } else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) { + sb.append(Constants.JDBC_HIVE_2); + String[] hostArray = host.split(","); + if (hostArray.length > 0) { + for (String zkHost : hostArray) { + sb.append(String.format("%s:%s,", zkHost, port)); + } + sb.deleteCharAt(sb.length() - 1); + } + } else if (Constants.CLICKHOUSE.equals(type.name())) { + sb.append(Constants.JDBC_CLICKHOUSE); + sb.append(host).append(":").append(port); + } else if (Constants.ORACLE.equals(type.name())) { + if (connectType == DbConnectType.ORACLE_SID) { + sb.append(Constants.JDBC_ORACLE_SID); + } else { + sb.append(Constants.JDBC_ORACLE_SERVICE_NAME); + } + sb.append(host).append(":").append(port); + } else if (Constants.SQLSERVER.equals(type.name())) { + 
sb.append(Constants.JDBC_SQLSERVER); + sb.append(host).append(":").append(port); + } else if (Constants.DB2.equals(type.name())) { + sb.append(Constants.JDBC_DB2); + sb.append(host).append(":").append(port); + } else if (Constants.PRESTO.equals(type.name())) { + sb.append(Constants.JDBC_PRESTO); + sb.append(host).append(":").append(port); + } + + return sb.toString(); + } + + /** + * delete datasource + * + * @param loginUser login user + * @param datasourceId data source id + * @return delete result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Result delete(User loginUser, int datasourceId) { + Result result = new Result<>(); + try { + //query datasource by id + DataSource dataSource = dataSourceMapper.selectById(datasourceId); + if (dataSource == null) { + logger.error("resource id {} not exist", datasourceId); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + if (!hasPerm(loginUser, dataSource.getUserId())) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + dataSourceMapper.deleteById(datasourceId); + datasourceUserMapper.deleteByDatasourceId(datasourceId); + putMsg(result, Status.SUCCESS); + } catch (Exception e) { + logger.error("delete datasource error", e); + throw new RuntimeException("delete datasource error"); + } + return result; + } + + /** + * unauthorized datasource + * + * @param loginUser login user + * @param userId user id + * @return unauthed data source result code + */ + @Override + public Map unauthDatasource(User loginUser, Integer userId) { + + Map result = new HashMap<>(); + //only admin operate + if (!isAdmin(loginUser)) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + /** + * query all data sources except userId + */ + List resultList = new ArrayList<>(); + List datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId); + Set datasourceSet = null; + if (datasourceList != null && !datasourceList.isEmpty()) { + 
datasourceSet = new HashSet<>(datasourceList); + + List authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId); + + Set authedDataSourceSet = null; + if (authedDataSourceList != null && !authedDataSourceList.isEmpty()) { + authedDataSourceSet = new HashSet<>(authedDataSourceList); + datasourceSet.removeAll(authedDataSourceSet); + + } + resultList = new ArrayList<>(datasourceSet); + } + result.put(Constants.DATA_LIST, resultList); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * authorized datasource + * + * @param loginUser login user + * @param userId user id + * @return authorized result code + */ + @Override + public Map authedDatasource(User loginUser, Integer userId) { + Map result = new HashMap<>(); + + if (!isAdmin(loginUser)) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + List authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId); + result.put(Constants.DATA_LIST, authedDatasourceList); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * get host and port by address + * + * @param address address + * @param separator separator + * @return sting array: [host,port] + */ + private String[] getHostsAndPort(String address, String separator) { + String[] result = new String[2]; + String[] tmpArray = address.split(separator); + String hostsAndPorts = tmpArray[tmpArray.length - 1]; + StringBuilder hosts = new StringBuilder(); + String[] hostPortArray = hostsAndPorts.split(Constants.COMMA); + String port = hostPortArray[0].split(Constants.COLON)[1]; + for (String hostPort : hostPortArray) { + hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA); + } + hosts.deleteCharAt(hosts.length() - 1); + result[0] = hosts.toString(); + result[1] = port; + return result; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java new file mode 100644 index 0000000000..b3e2b2cd74 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java @@ -0,0 +1,582 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.service.impl; + +import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; +import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS; +import static org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT; + +import org.apache.dolphinscheduler.api.enums.ExecuteType; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ExecutorService; +import org.apache.dolphinscheduler.api.service.MonitorService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.RunMode; +import org.apache.dolphinscheduler.common.enums.TaskDependType; +import org.apache.dolphinscheduler.common.enums.WarningType; +import org.apache.dolphinscheduler.common.model.Server; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import 
org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; + +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * executor service impl + */ +@Service +public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorService { + + private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceImpl.class); + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private ProjectService projectService; + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + @Autowired + private MonitorService monitorService; + + + @Autowired + private ProcessInstanceMapper processInstanceMapper; + + + @Autowired + private ProcessService processService; + + /** + * execute process instance + * + * @param loginUser login user + * @param projectName project name + * @param processDefinitionId process Definition Id + * @param cronTime cron time + * @param commandType command type + * @param failureStrategy failuer strategy + * @param startNodeList start nodelist + * @param taskDependType node dependency type + * @param warningType warning type + * @param warningGroupId notify group id + * @param processInstancePriority process instance priority + * @param 
workerGroup worker group name + * @param runMode run mode + * @param timeout timeout + * @param startParams the global param values which pass to new process instance + * @return execute process instance code + */ + @Override + public Map execProcessInstance(User loginUser, String projectName, + int processDefinitionId, String cronTime, CommandType commandType, + FailureStrategy failureStrategy, String startNodeList, + TaskDependType taskDependType, WarningType warningType, int warningGroupId, + RunMode runMode, + Priority processInstancePriority, String workerGroup, Integer timeout, + Map startParams) { + Map result = new HashMap<>(); + // timeout is invalid + if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { + putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR); + return result; + } + Project project = projectMapper.queryByName(projectName); + Map checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project); + if (checkResultAndAuth != null) { + return checkResultAndAuth; + } + + // check process define release state + ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId); + result = checkProcessDefinitionValid(processDefinition, processDefinitionId); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + + if (!checkTenantSuitable(processDefinition)) { + logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", + processDefinition.getId(), processDefinition.getName()); + putMsg(result, Status.TENANT_NOT_SUITABLE); + return result; + } + + // check master exists + if (!checkMasterExists(result)) { + return result; + } + + /** + * create command + */ + int create = this.createCommand(commandType, processDefinitionId, + taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), + warningGroupId, runMode, processInstancePriority, workerGroup, startParams); + + if (create > 0) { + processDefinition.setWarningGroupId(warningGroupId); 
+ processDefinitionMapper.updateById(processDefinition); + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); + } + return result; + } + + /** + * check whether master exists + * + * @param result result + * @return master exists return true , otherwise return false + */ + private boolean checkMasterExists(Map result) { + // check master server exists + List masterServers = monitorService.getServerListFromZK(true); + + // no master + if (masterServers.isEmpty()) { + putMsg(result, Status.MASTER_NOT_EXISTS); + return false; + } + return true; + } + + /** + * check whether the process definition can be executed + * + * @param processDefinition process definition + * @param processDefineId process definition id + * @return check result code + */ + @Override + public Map checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId) { + Map result = new HashMap<>(); + if (processDefinition == null) { + // check process definition exists + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); + } else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { + // check process definition online + putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefineId); + } else { + result.put(Constants.STATUS, Status.SUCCESS); + } + return result; + } + + /** + * do action to process instance:pause, stop, repeat, recover from pause, recover from stop + * + * @param loginUser login user + * @param projectName project name + * @param processInstanceId process instance id + * @param executeType execute type + * @return execute result code + */ + @Override + public Map execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = checkResultAndAuth(loginUser, projectName, project); + if (checkResult != null) { + return checkResult; + } + + // 
check master exists + if (!checkMasterExists(result)) { + return result; + } + + ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); + if (processInstance == null) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); + return result; + } + + ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); + if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) { + result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId()); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + } + + checkResult = checkExecuteType(processInstance, executeType); + Status status = (Status) checkResult.get(Constants.STATUS); + if (status != Status.SUCCESS) { + return checkResult; + } + if (!checkTenantSuitable(processDefinition)) { + logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", + processDefinition.getId(), processDefinition.getName()); + putMsg(result, Status.TENANT_NOT_SUITABLE); + } + + switch (executeType) { + case REPEAT_RUNNING: + result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING); + break; + case RECOVER_SUSPENDED_PROCESS: + result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS); + break; + case START_FAILURE_TASK_PROCESS: + result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS); + break; + case STOP: + if (processInstance.getState() == ExecutionStatus.READY_STOP) { + putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); + } else { + result = updateProcessInstancePrepare(processInstance, CommandType.STOP, ExecutionStatus.READY_STOP); + } + break; + case PAUSE: + if (processInstance.getState() == 
ExecutionStatus.READY_PAUSE) { + putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); + } else { + result = updateProcessInstancePrepare(processInstance, CommandType.PAUSE, ExecutionStatus.READY_PAUSE); + } + break; + default: + logger.error("unknown execute type : {}", executeType); + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type"); + + break; + } + return result; + } + + /** + * check tenant suitable + * + * @param processDefinition process definition + * @return true if tenant suitable, otherwise return false + */ + private boolean checkTenantSuitable(ProcessDefinition processDefinition) { + Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(), + processDefinition.getUserId()); + return tenant != null; + } + + /** + * Check the state of process instance and the type of operation match + * + * @param processInstance process instance + * @param executeType execute type + * @return check result code + */ + private Map checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) { + + Map result = new HashMap<>(); + ExecutionStatus executionStatus = processInstance.getState(); + boolean checkResult = false; + switch (executeType) { + case PAUSE: + case STOP: + if (executionStatus.typeIsRunning()) { + checkResult = true; + } + break; + case REPEAT_RUNNING: + if (executionStatus.typeIsFinished()) { + checkResult = true; + } + break; + case START_FAILURE_TASK_PROCESS: + if (executionStatus.typeIsFailure()) { + checkResult = true; + } + break; + case RECOVER_SUSPENDED_PROCESS: + if (executionStatus.typeIsPause() || executionStatus.typeIsCancel()) { + checkResult = true; + } + break; + default: + break; + } + if (!checkResult) { + putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString()); + } else { + putMsg(result, Status.SUCCESS); + } + return result; + } + 
+ /** + * prepare to update process instance command type and status + * + * @param processInstance process instance + * @param commandType command type + * @param executionStatus execute status + * @return update result + */ + private Map updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) { + Map result = new HashMap<>(); + + processInstance.setCommandType(commandType); + processInstance.addHistoryCmd(commandType); + processInstance.setState(executionStatus); + int update = processService.updateProcessInstance(processInstance); + + // determine whether the process is normal + if (update > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); + } + return result; + } + + /** + * insert command, used in the implementation of the page, re run, recovery (pause / failure) execution + * + * @param loginUser login user + * @param instanceId instance id + * @param processDefinitionId process definition id + * @param commandType command type + * @return insert result code + */ + private Map insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) { + Map result = new HashMap<>(); + Command command = new Command(); + command.setCommandType(commandType); + command.setProcessDefinitionId(processDefinitionId); + command.setCommandParam(String.format("{\"%s\":%d}", + CMD_PARAM_RECOVER_PROCESS_ID_STRING, instanceId)); + command.setExecutorId(loginUser.getId()); + + if (!processService.verifyIsNeedCreateCommand(command)) { + putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, processDefinitionId); + return result; + } + + int create = processService.createCommand(command); + + if (create > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); + } + + return result; + } + + /** + * check if sub processes are offline before starting process definition + * + 
* @param processDefineId process definition id + * @return check result code + */ + @Override + public Map startCheckByProcessDefinedId(int processDefineId) { + Map result = new HashMap<>(); + + if (processDefineId == 0) { + logger.error("process definition id is null"); + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "process definition id"); + } + List ids = new ArrayList<>(); + processService.recurseFindSubProcessId(processDefineId, ids); + Integer[] idArray = ids.toArray(new Integer[ids.size()]); + if (!ids.isEmpty()) { + List processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); + if (processDefinitionList != null) { + for (ProcessDefinition processDefinition : processDefinitionList) { + /** + * if there is no online process, exit directly + */ + if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { + putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); + logger.info("not release process definition id: {} , name : {}", + processDefinition.getId(), processDefinition.getName()); + return result; + } + } + } + } + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * create command + * + * @param commandType commandType + * @param processDefineId processDefineId + * @param nodeDep nodeDep + * @param failureStrategy failureStrategy + * @param startNodeList startNodeList + * @param schedule schedule + * @param warningType warningType + * @param executorId executorId + * @param warningGroupId warningGroupId + * @param runMode runMode + * @param processInstancePriority processInstancePriority + * @param workerGroup workerGroup + * @return command id + */ + private int createCommand(CommandType commandType, int processDefineId, + TaskDependType nodeDep, FailureStrategy failureStrategy, + String startNodeList, String schedule, WarningType warningType, + int executorId, int warningGroupId, + RunMode runMode, Priority processInstancePriority, String workerGroup, + Map startParams) { 
+ + /** + * instantiate command schedule instance + */ + Command command = new Command(); + + Map cmdParam = new HashMap<>(); + if (commandType == null) { + command.setCommandType(CommandType.START_PROCESS); + } else { + command.setCommandType(commandType); + } + command.setProcessDefinitionId(processDefineId); + if (nodeDep != null) { + command.setTaskDependType(nodeDep); + } + if (failureStrategy != null) { + command.setFailureStrategy(failureStrategy); + } + + if (StringUtils.isNotEmpty(startNodeList)) { + cmdParam.put(CMD_PARAM_START_NODE_NAMES, startNodeList); + } + if (warningType != null) { + command.setWarningType(warningType); + } + if (startParams != null && startParams.size() > 0) { + cmdParam.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParams)); + } + command.setCommandParam(JSONUtils.toJsonString(cmdParam)); + command.setExecutorId(executorId); + command.setWarningGroupId(warningGroupId); + command.setProcessInstancePriority(processInstancePriority); + command.setWorkerGroup(workerGroup); + + Date start = null; + Date end = null; + if (StringUtils.isNotEmpty(schedule)) { + String[] interval = schedule.split(","); + if (interval.length == 2) { + start = DateUtils.getScheduleDate(interval[0]); + end = DateUtils.getScheduleDate(interval[1]); + } + } + + // determine whether to complement + if (commandType == CommandType.COMPLEMENT_DATA) { + runMode = (runMode == null) ? 
RunMode.RUN_MODE_SERIAL : runMode; + if (null != start && null != end && !start.after(end)) { + if (runMode == RunMode.RUN_MODE_SERIAL) { + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end)); + command.setCommandParam(JSONUtils.toJsonString(cmdParam)); + return processService.createCommand(command); + } else if (runMode == RunMode.RUN_MODE_PARALLEL) { + List schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId); + List listDate = new LinkedList<>(); + if (!CollectionUtils.isEmpty(schedules)) { + for (Schedule item : schedules) { + listDate.addAll(CronUtils.getSelfFireDateList(start, end, item.getCrontab())); + } + } + if (!CollectionUtils.isEmpty(listDate)) { + // loop by schedule date + for (Date date : listDate) { + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date)); + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(date)); + command.setCommandParam(JSONUtils.toJsonString(cmdParam)); + processService.createCommand(command); + } + return listDate.size(); + } else { + // loop by day + int runCunt = 0; + while (!start.after(end)) { + runCunt += 1; + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start)); + command.setCommandParam(JSONUtils.toJsonString(cmdParam)); + processService.createCommand(command); + start = DateUtils.getSomeDay(start, 1); + } + return runCunt; + } + } + } else { + logger.error("there is not valid schedule date for the process definition: id:{}", processDefineId); + } + } else { + command.setCommandParam(JSONUtils.toJsonString(cmdParam)); + return processService.createCommand(command); + } + + return 0; + } + + /** + * check result and auth + */ + private Map checkResultAndAuth(User loginUser, String projectName, Project project) { + // check 
project auth + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status status = (Status) checkResult.get(Constants.STATUS); + if (status != Status.SUCCESS) { + return checkResult; + } + return null; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java index 1574e7f0e7..db46c5bdfb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.enums.Status; @@ -41,7 +42,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; /** - * log service + * logger service impl */ @Service public class LoggerServiceImpl implements LoggerService { @@ -77,6 +78,7 @@ public class LoggerServiceImpl implements LoggerService { * @param limit limit * @return log string data */ + @Override @SuppressWarnings("unchecked") public Result queryLog(int taskInstId, int skipLineNum, int limit) { @@ -116,6 +118,7 @@ public class LoggerServiceImpl implements LoggerService { * @param taskInstId task instance id * @return log byte array */ + @Override public byte[] getLogBytes(int taskInstId) { TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java new file mode 100644 index 0000000000..7f54e3ef95 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java @@ -0,0 +1,165 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.service.impl; + +import static org.apache.dolphinscheduler.common.utils.Preconditions.checkNotNull; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.MonitorService; +import org.apache.dolphinscheduler.api.utils.ZookeeperMonitor; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ZKNodeType; +import org.apache.dolphinscheduler.common.model.Server; +import org.apache.dolphinscheduler.common.model.WorkerServerModel; +import org.apache.dolphinscheduler.dao.MonitorDBDao; +import org.apache.dolphinscheduler.dao.entity.MonitorRecord; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import com.google.common.collect.Sets; + +/** + * monitor service impl + */ +@Service +public class MonitorServiceImpl extends BaseServiceImpl implements MonitorService { + + @Autowired + private ZookeeperMonitor zookeeperMonitor; + + @Autowired + private MonitorDBDao monitorDBDao; + + /** + * query database state + * + * @param loginUser login user + * @return data base state + */ + @Override + public Map queryDatabaseState(User loginUser) { + Map result = new HashMap<>(); + + List monitorRecordList = monitorDBDao.queryDatabaseState(); + + result.put(Constants.DATA_LIST, monitorRecordList); + putMsg(result, Status.SUCCESS); + + return result; + + } + + /** + * query master list + * + * @param loginUser login user + * @return master information list + */ + @Override + public Map queryMaster(User loginUser) { + + Map result = new HashMap<>(); + + List masterServers = getServerListFromZK(true); + 
result.put(Constants.DATA_LIST, masterServers); + putMsg(result,Status.SUCCESS); + + return result; + } + + /** + * query zookeeper state + * + * @param loginUser login user + * @return zookeeper information list + */ + @Override + public Map queryZookeeperState(User loginUser) { + Map result = new HashMap<>(); + + List zookeeperRecordList = zookeeperMonitor.zookeeperInfoList(); + + result.put(Constants.DATA_LIST, zookeeperRecordList); + putMsg(result, Status.SUCCESS); + + return result; + + } + + /** + * query worker list + * + * @param loginUser login user + * @return worker information list + */ + @Override + public Map queryWorker(User loginUser) { + + Map result = new HashMap<>(); + List workerServers = getServerListFromZK(false) + .stream() + .map((Server server) -> { + WorkerServerModel model = new WorkerServerModel(); + model.setId(server.getId()); + model.setHost(server.getHost()); + model.setPort(server.getPort()); + model.setZkDirectories(Sets.newHashSet(server.getZkDirectory())); + model.setResInfo(server.getResInfo()); + model.setCreateTime(server.getCreateTime()); + model.setLastHeartbeatTime(server.getLastHeartbeatTime()); + return model; + }) + .collect(Collectors.toList()); + + Map workerHostPortServerMapping = workerServers + .stream() + .collect(Collectors.toMap( + (WorkerServerModel worker) -> { + String[] s = worker.getZkDirectories().iterator().next().split("/"); + return s[s.length - 1]; + } + , Function.identity() + , (WorkerServerModel oldOne, WorkerServerModel newOne) -> { + oldOne.getZkDirectories().addAll(newOne.getZkDirectories()); + return oldOne; + })); + + result.put(Constants.DATA_LIST, workerHostPortServerMapping.values()); + putMsg(result,Status.SUCCESS); + + return result; + } + + @Override + public List getServerListFromZK(boolean isMaster) { + + checkNotNull(zookeeperMonitor); + ZKNodeType zkNodeType = isMaster ? 
ZKNodeType.MASTER : ZKNodeType.WORKER; + return zookeeperMonitor.getServersList(zkNodeType); + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java index 2a2ae78618..97cea0281a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java @@ -23,7 +23,6 @@ import org.apache.dolphinscheduler.api.dto.ProcessMeta; import org.apache.dolphinscheduler.api.dto.treeview.Instance; import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto; import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.service.BaseService; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService; import org.apache.dolphinscheduler.api.service.ProcessInstanceService; @@ -111,8 +110,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; * process definition service impl */ @Service -public class ProcessDefinitionServiceImpl extends BaseService implements - ProcessDefinitionService { +public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements ProcessDefinitionService { private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceImpl.class); @@ -146,6 +144,9 @@ public class ProcessDefinitionServiceImpl extends BaseService implements @Autowired private ProcessService processService; + @Autowired + private SchedulerService schedulerService; + /** * create process definition * @@ -273,7 +274,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements @Override public Map queryProcessDefinitionList(User loginUser, String 
projectName) { - HashMap result = new HashMap<>(5); + HashMap result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); @@ -399,7 +400,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements String desc, String locations, String connects) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); @@ -514,7 +515,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements @Transactional(rollbackFor = RuntimeException.class) public Map deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); @@ -634,7 +635,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements // set status schedule.setReleaseState(ReleaseState.OFFLINE); scheduleMapper.updateById(schedule); - SchedulerService.deleteSchedule(project.getId(), schedule.getId()); + schedulerService.deleteSchedule(project.getId(), schedule.getId()); } break; default: @@ -823,7 +824,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements @Override @Transactional(rollbackFor = RuntimeException.class) public Map importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); String processMetaJson = FileUtils.file2String(file); List processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class); @@ -992,7 +993,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements } //recursive sub-process parameter correction map key 
for old process id value for new process id - Map subProcessIdMap = new HashMap<>(20); + Map subProcessIdMap = new HashMap<>(); List subProcessList = StreamUtils.asStream(jsonArray.elements()) .filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText())) @@ -1283,7 +1284,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements @Override public Map queryProcessDefinitionAllByProjectId(Integer projectId) { - HashMap result = new HashMap<>(5); + HashMap result = new HashMap<>(); List resourceList = processDefineMapper.queryAllDefinitionList(projectId); result.put(Constants.DATA_LIST, resourceList); @@ -1494,7 +1495,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements Integer processId, Project targetProject) throws JsonProcessingException { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); ProcessDefinition processDefinition = processDefineMapper.selectById(processId); if (processDefinition == null) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java index 08cedfee15..be7a3e93b0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java @@ -18,7 +18,6 @@ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.service.BaseService; import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.utils.PageInfo; @@ -42,9 +41,11 @@ import 
com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.google.common.collect.ImmutableMap; +/** + * process definition version service impl + */ @Service -public class ProcessDefinitionVersionServiceImpl extends BaseService implements - ProcessDefinitionVersionService { +public class ProcessDefinitionVersionServiceImpl extends BaseServiceImpl implements ProcessDefinitionVersionService { @Autowired private ProcessDefinitionVersionMapper processDefinitionVersionMapper; @@ -61,6 +62,7 @@ public class ProcessDefinitionVersionServiceImpl extends BaseService implements * @param processDefinition the process definition that need to record version * @return the newest version number of this process definition */ + @Override public long addProcessDefinitionVersion(ProcessDefinition processDefinition) { long version = this.queryMaxVersionByProcessDefinitionId(processDefinition.getId()) + 1; @@ -110,6 +112,7 @@ public class ProcessDefinitionVersionServiceImpl extends BaseService implements * @param processDefinitionId process definition id * @return the pagination process definition versions info of the certain process definition */ + @Override public Map queryProcessDefinitionVersions(User loginUser, String projectName, int pageNo, int pageSize, int processDefinitionId) { Map result = new HashMap<>(); @@ -151,6 +154,7 @@ public class ProcessDefinitionVersionServiceImpl extends BaseService implements * @param version version number * @return the process definition version info */ + @Override public ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(int processDefinitionId, long version) { return processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(processDefinitionId, version); } @@ -164,6 +168,7 @@ public class ProcessDefinitionVersionServiceImpl extends BaseService implements * @param version version number * @return delele result code */ + @Override public Map 
deleteByProcessDefinitionIdAndVersion(User loginUser, String projectName, int processDefinitionId, long version) { Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java new file mode 100644 index 0000000000..6659a7bcd6 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java @@ -0,0 +1,753 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.service.impl; + +import static org.apache.dolphinscheduler.common.Constants.DATA_LIST; +import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT; +import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS; +import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; +import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE; +import static org.apache.dolphinscheduler.common.Constants.TASK_LIST; + +import org.apache.dolphinscheduler.api.dto.gantt.GanttDto; +import org.apache.dolphinscheduler.api.dto.gantt.Task; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ExecutorService; +import org.apache.dolphinscheduler.api.service.LoggerService; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService; +import org.apache.dolphinscheduler.api.service.ProcessInstanceService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.api.service.UsersService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.graph.DAG; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.model.TaskNodeRelation; +import org.apache.dolphinscheduler.common.process.ProcessDag; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import 
org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessData; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import org.apache.dolphinscheduler.dao.utils.DagHelper; +import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * process instance service impl + */ +@Service +public class ProcessInstanceServiceImpl 
extends BaseServiceImpl implements ProcessInstanceService { + + @Autowired + ProjectMapper projectMapper; + + @Autowired + ProjectService projectService; + + @Autowired + ProcessService processService; + + @Autowired + ProcessInstanceMapper processInstanceMapper; + + @Autowired + ProcessDefinitionMapper processDefineMapper; + + @Autowired + ProcessDefinitionService processDefinitionService; + + @Autowired + ProcessDefinitionVersionService processDefinitionVersionService; + + @Autowired + ExecutorService execService; + + @Autowired + TaskInstanceMapper taskInstanceMapper; + + @Autowired + LoggerService loggerService; + + + @Autowired + UsersService usersService; + + /** + * return top n SUCCESS process instance order by running time which started between startTime and endTime + */ + @Override + public Map queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime) { + Map result = new HashMap<>(); + + Project project = projectMapper.queryByName(projectName); + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + if (0 > size) { + putMsg(result, Status.NEGTIVE_SIZE_NUMBER_ERROR, size); + return result; + } + if (Objects.isNull(startTime)) { + putMsg(result, Status.DATA_IS_NULL, Constants.START_TIME); + return result; + } + Date start = DateUtils.stringToDate(startTime); + if (Objects.isNull(endTime)) { + putMsg(result, Status.DATA_IS_NULL, Constants.END_TIME); + return result; + } + Date end = DateUtils.stringToDate(endTime); + if (start == null || end == null) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE); + return result; + } + if (start.getTime() > end.getTime()) { + putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR, startTime, endTime); + return result; + } + + List processInstances = 
processInstanceMapper.queryTopNProcessInstance(size, start, end, ExecutionStatus.SUCCESS); + result.put(DATA_LIST, processInstances); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * query process instance by id + * + * @param loginUser login user + * @param projectName project name + * @param processId process instance id + * @return process instance detail + */ + @Override + public Map queryProcessInstanceById(User loginUser, String projectName, Integer processId) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); + + ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); + processInstance.setWarningGroupId(processDefinition.getWarningGroupId()); + result.put(DATA_LIST, processInstance); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * paging query process instance list, filtering according to project, process definition, time range, keyword, process status + * + * @param loginUser login user + * @param projectName project name + * @param pageNo page number + * @param pageSize page size + * @param processDefineId process definition id + * @param searchVal search value + * @param stateType state type + * @param host host + * @param startDate start time + * @param endDate end time + * @return process instance list + */ + @Override + public Map queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId, + String startDate, String endDate, + String searchVal, String executorName, ExecutionStatus stateType, String host, + Integer pageNo, Integer pageSize) { + + Map result = new HashMap<>(); + 
Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + int[] statusArray = null; + // filter by state + if (stateType != null) { + statusArray = new int[]{stateType.ordinal()}; + } + + Date start = null; + Date end = null; + try { + if (StringUtils.isNotEmpty(startDate)) { + start = DateUtils.getScheduleDate(startDate); + } + if (StringUtils.isNotEmpty(endDate)) { + end = DateUtils.getScheduleDate(endDate); + } + } catch (Exception e) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE); + return result; + } + + Page page = new Page<>(pageNo, pageSize); + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + int executorId = usersService.getUserIdByName(executorName); + + IPage processInstanceList = + processInstanceMapper.queryProcessInstanceListPaging(page, + project.getId(), processDefineId, searchVal, executorId, statusArray, host, start, end); + + List processInstances = processInstanceList.getRecords(); + + for (ProcessInstance processInstance : processInstances) { + processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime())); + User executor = usersService.queryUser(processInstance.getExecutorId()); + if (null != executor) { + processInstance.setExecutorName(executor.getUserName()); + } + } + + pageInfo.setTotalCount((int) processInstanceList.getTotal()); + pageInfo.setLists(processInstances); + result.put(DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * query task list by process instance id + * + * @param loginUser login user + * @param projectName project name + * @param processId process instance id + * @return task list for the process instance + * @throws IOException io exception + */ + @Override + 
public Map queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); + List taskInstanceList = processService.findValidTaskListByProcessId(processId); + addDependResultForTaskList(taskInstanceList); + Map resultMap = new HashMap<>(); + resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); + resultMap.put(TASK_LIST, taskInstanceList); + result.put(DATA_LIST, resultMap); + + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * add dependent result for dependent task + */ + private void addDependResultForTaskList(List taskInstanceList) throws IOException { + for (TaskInstance taskInstance : taskInstanceList) { + if (taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) { + Result logResult = loggerService.queryLog( + taskInstance.getId(), Constants.LOG_QUERY_SKIP_LINE_NUMBER, Constants.LOG_QUERY_LIMIT); + if (logResult.getCode() == Status.SUCCESS.ordinal()) { + String log = logResult.getData(); + Map resultMap = parseLogForDependentResult(log); + taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap)); + } + } + } + } + + @Override + public Map parseLogForDependentResult(String log) throws IOException { + Map resultMap = new HashMap<>(); + if (StringUtils.isEmpty(log)) { + return resultMap; + } + + BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes( + StandardCharsets.UTF_8)), StandardCharsets.UTF_8)); + String line; + while ((line = br.readLine()) != null) { + if (line.contains(DEPENDENT_SPLIT)) { + String[] 
tmpStringArray = line.split(":\\|\\|"); + if (tmpStringArray.length != 2) { + continue; + } + String dependResultString = tmpStringArray[1]; + String[] dependStringArray = dependResultString.split(","); + if (dependStringArray.length != 2) { + continue; + } + String key = dependStringArray[0].trim(); + DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim()); + resultMap.put(key, dependResult); + } + } + return resultMap; + } + + /** + * query sub process instance detail info by task id + * + * @param loginUser login user + * @param projectName project name + * @param taskId task id + * @return sub process instance detail + */ + @Override + public Map querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + TaskInstance taskInstance = processService.findTaskInstanceById(taskId); + if (taskInstance == null) { + putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); + return result; + } + if (!taskInstance.isSubProcess()) { + putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName()); + return result; + } + + ProcessInstance subWorkflowInstance = processService.findSubProcessInstance( + taskInstance.getProcessInstanceId(), taskInstance.getId()); + if (subWorkflowInstance == null) { + putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId); + return result; + } + Map dataMap = new HashMap<>(); + dataMap.put(Constants.SUBPROCESS_INSTANCE_ID, subWorkflowInstance.getId()); + result.put(DATA_LIST, dataMap); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * update process instance + * + * @param loginUser login user + * @param projectName project name + * 
@param processInstanceJson process instance json + * @param processInstanceId process instance id + * @param scheduleTime schedule time + * @param syncDefine sync define + * @param flag flag + * @param locations locations + * @param connects connects + * @return update result code + * @throws ParseException parse exception for json parse + */ + @Override + public Map updateProcessInstance(User loginUser, String projectName, Integer processInstanceId, + String processInstanceJson, String scheduleTime, Boolean syncDefine, + Flag flag, String locations, String connects) throws ParseException { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + //check project permission + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + //check process instance exists + ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); + if (processInstance == null) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); + return result; + } + + //check process instance status + if (!processInstance.getState().typeIsFinished()) { + putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, + processInstance.getName(), processInstance.getState().toString(), "update"); + return result; + } + Date schedule = null; + schedule = processInstance.getScheduleTime(); + if (scheduleTime != null) { + schedule = DateUtils.getScheduleDate(scheduleTime); + } + processInstance.setScheduleTime(schedule); + processInstance.setLocations(locations); + processInstance.setConnects(connects); + String globalParams = null; + String originDefParams = null; + int timeout = processInstance.getTimeout(); + ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); + if 
(StringUtils.isNotEmpty(processInstanceJson)) { + ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class); + //check workflow json is valid + Map checkFlowJson = processDefinitionService.checkProcessNodeList(processData, processInstanceJson); + if (checkFlowJson.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + + originDefParams = JSONUtils.toJsonString(processData.getGlobalParams()); + List globalParamList = processData.getGlobalParams(); + Map globalParamMap = Optional.ofNullable(globalParamList).orElse(Collections.emptyList()).stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); + globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, + processInstance.getCmdTypeIfComplement(), schedule); + timeout = processData.getTimeout(); + processInstance.setTimeout(timeout); + Tenant tenant = processService.getTenantForProcess(processData.getTenantId(), + processDefinition.getUserId()); + if (tenant != null) { + processInstance.setTenantCode(tenant.getTenantCode()); + } + // get the processinstancejson before saving,and then save the name and taskid + String oldJson = processInstance.getProcessInstanceJson(); + if (StringUtils.isNotEmpty(oldJson)) { + processInstanceJson = processService.changeJson(processData,oldJson); + } + processInstance.setProcessInstanceJson(processInstanceJson); + processInstance.setGlobalParams(globalParams); + } + + int update = processService.updateProcessInstance(processInstance); + int updateDefine = 1; + if (Boolean.TRUE.equals(syncDefine)) { + processDefinition.setProcessDefinitionJson(processInstanceJson); + processDefinition.setGlobalParams(originDefParams); + processDefinition.setLocations(locations); + processDefinition.setConnects(connects); + processDefinition.setTimeout(timeout); + processDefinition.setUpdateTime(new Date()); + + // add process definition version + long version = 
processDefinitionVersionService.addProcessDefinitionVersion(processDefinition); + processDefinition.setVersion(version); + updateDefine = processDefineMapper.updateById(processDefinition); + } + if (update > 0 && updateDefine > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR); + } + + return result; + + } + + /** + * query parent process instance detail info by sub process instance id + * + * @param loginUser login user + * @param projectName project name + * @param subId sub process id + * @return parent instance detail + */ + @Override + public Map queryParentInstanceBySubId(User loginUser, String projectName, Integer subId) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId); + if (subInstance == null) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId); + return result; + } + if (subInstance.getIsSubProcess() == Flag.NO) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName()); + return result; + } + + ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId); + if (parentWorkflowInstance == null) { + putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST); + return result; + } + Map dataMap = new HashMap<>(); + dataMap.put(Constants.PARENT_WORKFLOW_INSTANCE, parentWorkflowInstance.getId()); + result.put(DATA_LIST, dataMap); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * delete process instance by id, at the same time,delete task instance and their mapping relation data + * + * @param loginUser login user + * @param projectName project name + * @param processInstanceId 
    /**
     * delete process instance by id; at the same time, delete task instances
     * and their mapping relation data (cascade via processService helpers).
     *
     * @param loginUser         login user
     * @param projectName       project name
     * @param processInstanceId process instance id
     * @return delete result code
     */
    @Override
    @Transactional(rollbackFor = RuntimeException.class)
    public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) {

        Map<String, Object> result = new HashMap<>();
        Project project = projectMapper.queryByName(projectName);

        Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
        Status resultEnum = (Status) checkResult.get(Constants.STATUS);
        if (resultEnum != Status.SUCCESS) {
            return checkResult;
        }
        ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
        if (null == processInstance) {
            putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
            return result;
        }

        // remove the on-disk task log files first, then cascade-delete DB rows
        processService.removeTaskLogFile(processInstanceId);
        // delete database cascade
        int delete = processService.deleteWorkProcessInstanceById(processInstanceId);

        processService.deleteAllSubWorkProcessByParentId(processInstanceId);
        processService.deleteWorkProcessMapByParentId(processInstanceId);

        if (delete > 0) {
            putMsg(result, Status.SUCCESS);
        } else {
            putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR);
        }

        return result;
    }

    /**
     * view process instance variables: resolves the instance's global params
     * and each task node's local params, substituting business-time
     * placeholders, and returns both under DATA_LIST.
     *
     * @param processInstanceId process instance id
     * @return variables data (GLOBAL_PARAMS list and LOCAL_PARAMS map)
     */
    @Override
    public Map<String, Object> viewVariables(Integer processInstanceId) {
        Map<String, Object> result = new HashMap<>();

        ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);

        if (processInstance == null) {
            throw new RuntimeException("workflow instance is null");
        }

        // business-time placeholders derived from command type + schedule time
        Map<String, String> timeParams = BusinessTimeUtils
                .getBusinessTime(processInstance.getCmdTypeIfComplement(),
                        processInstance.getScheduleTime());

        String workflowInstanceJson = processInstance.getProcessInstanceJson();

        ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class);

        String userDefinedParams = processInstance.getGlobalParams();

        // global params
        List<Property> globalParams = new ArrayList<>();

        if (userDefinedParams != null && userDefinedParams.length() > 0) {
            globalParams = JSONUtils.toList(userDefinedParams, Property.class);
        }

        List<TaskNode> taskNodeList = workflowData.getTasks();

        // global param string: round-trip through JSON so placeholder
        // substitution applies to every property value at once
        String globalParamStr = JSONUtils.toJsonString(globalParams);
        globalParamStr = ParameterUtils.convertParameterPlaceholders(globalParamStr, timeParams);
        globalParams = JSONUtils.toList(globalParamStr, Property.class);
        // resolved globals also become substitution context for local params below
        for (Property property : globalParams) {
            timeParams.put(property.getProp(), property.getValue());
        }

        // local params, keyed by task node name
        Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();
        for (TaskNode taskNode : taskNodeList) {
            String parameter = taskNode.getParams();
            Map<String, String> map = JSONUtils.toMap(parameter);
            String localParams = map.get(LOCAL_PARAMS);
            if (localParams != null && !localParams.isEmpty()) {
                localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams);
                List<Property> localParamsList = JSONUtils.toList(localParams, Property.class);

                Map<String, Object> localParamsMap = new HashMap<>();
                localParamsMap.put(Constants.TASK_TYPE, taskNode.getType());
                localParamsMap.put(Constants.LOCAL_PARAMS_LIST, localParamsList);
                if (CollectionUtils.isNotEmpty(localParamsList)) {
                    localUserDefParams.put(taskNode.getName(), localParamsMap);
                }
            }

        }

        Map<String, Object> resultMap = new HashMap<>();

        resultMap.put(GLOBAL_PARAMS, globalParams);
        resultMap.put(LOCAL_PARAMS, localUserDefParams);

        result.put(DATA_LIST, resultMap);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * encapsulation gantt structure: topologically sorts the instance's DAG
     * and builds one gantt Task per task instance.
     *
     * @param processInstanceId process instance id
     * @return gantt tree data
     * @throws Exception exception when json parse
     */
    @Override
    public Map<String, Object> viewGantt(Integer processInstanceId) throws Exception {
        Map<String, Object> result = new HashMap<>();

        ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);

        if (processInstance == null) {
            throw new RuntimeException("workflow instance is null");
        }

        GanttDto ganttDto = new GanttDto();

        DAG<String, TaskNode, TaskNodeRelation> dag = processInstance2DAG(processInstance);
        // topological sort
        List<String> nodeList = dag.topologicalSort();

        ganttDto.setTaskNames(nodeList);

        List<Task> taskList = new ArrayList<>();
        for (String node : nodeList) {
            TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstanceId, node);
            if (taskInstance == null) {
                // node exists in the DAG but has no instance yet — skip
                continue;
            }
            // missing start/end times fall back to "now" so in-flight tasks still render
            Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
            Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
            Task task = new Task();
            task.setTaskName(taskInstance.getName());
            task.getStartDate().add(startTime.getTime());
            task.getEndDate().add(endTime.getTime());
            task.setIsoStart(startTime);
            task.setIsoEnd(endTime);
            task.setStatus(taskInstance.getState().toString());
            task.setExecutionDate(taskInstance.getStartTime());
            task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime()));
            taskList.add(task);
        }
        ganttDto.setTasks(taskList);

        result.put(DATA_LIST, ganttDto);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * process instance to DAG
     *
     * @param processInstance input process instance
     * @return process instance dag.
     */
    private static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) {

        String processDefinitionJson = processInstance.getProcessInstanceJson();

        ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);

        List<TaskNode> taskNodeList = processData.getTasks();

        ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);

        return DagHelper.buildDagGraph(processDag);
    }

    /**
     * query process instance by processDefinitionId and stateArray
     *
     * @param processDefinitionId processDefinitionId
     * @param states              states array
     * @return process instance list
     */
    @Override
    public List<ProcessInstance> queryByProcessDefineIdAndStatus(int processDefinitionId, int[] states) {
        return processInstanceMapper.queryByProcessDefineIdAndStatus(processDefinitionId, states);
    }

    /**
     * query process instance by processDefinitionId
     *
     * @param processDefinitionId processDefinitionId
     * @param size                size
     * @return process instance list
     */
    @Override
    public List<ProcessInstance> queryByProcessDefineId(int processDefinitionId, int size) {
        return processInstanceMapper.queryByProcessDefineId(processDefinitionId, size);
    }

}
org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; @@ -48,10 +47,10 @@ import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** - * project service implement + * project service impl **/ @Service -public class ProjectServiceImpl extends BaseService implements ProjectService { +public class ProjectServiceImpl extends BaseServiceImpl implements ProjectService { @Autowired private ProjectMapper projectMapper; @@ -70,6 +69,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { * @param desc description * @return returns an error if it exists */ + @Override public Map createProject(User loginUser, String name, String desc) { Map result = new HashMap<>(); @@ -112,6 +112,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { * @param projectId project id * @return project detail information */ + @Override public Map queryById(Integer projectId) { Map result = new HashMap<>(); @@ -134,6 +135,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { * @param projectName project name * @return true if the login user have permission to see the project */ + @Override public Map checkProjectAndAuth(User loginUser, Project project, String projectName) { Map result = new HashMap<>(); if (project == null) { @@ -147,6 +149,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { return result; } + @Override public boolean hasProjectAndPerm(User loginUser, Project project, Map result) { boolean checkResult = false; if (project == null) { @@ -168,6 +171,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { * @param pageNo page number * @return project list which the login user have permission to see */ + @Override public Map queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) { Map 
result = new HashMap<>(); PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); @@ -199,6 +203,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { * @param projectId project id * @return delete result code */ + @Override public Map deleteProject(User loginUser, Integer projectId) { Map result = new HashMap<>(); Project project = projectMapper.selectById(projectId); @@ -253,6 +258,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { * @param desc description * @return update result code */ + @Override public Map update(User loginUser, Integer projectId, String projectName, String desc) { Map result = new HashMap<>(); @@ -292,6 +298,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { * @param userId user id * @return the projects which user have not permission to see */ + @Override public Map queryUnauthorizedProject(User loginUser, Integer userId) { Map result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { @@ -342,6 +349,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { * @param userId user id * @return projects which the user have permission to see, Except for items created by this user */ + @Override public Map queryAuthorizedProject(User loginUser, Integer userId) { Map result = new HashMap<>(); @@ -362,6 +370,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { * @param loginUser login user * @return projects which the user have permission to see, Except for items created by this user */ + @Override public Map queryProjectCreatedByUser(User loginUser) { Map result = new HashMap<>(); @@ -382,6 +391,7 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { * @param loginUser login user * @return */ + @Override public Map queryProjectCreatedAndAuthorizedByUser(User loginUser) { Map result = new HashMap<>(); @@ -441,6 +451,7 @@ public class ProjectServiceImpl extends 
BaseService implements ProjectService { * * @return project list */ + @Override public Map queryAllProjectList() { Map result = new HashMap<>(); List projects = new ArrayList<>(); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java new file mode 100644 index 0000000000..002794dc56 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java @@ -0,0 +1,299 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

package org.apache.dolphinscheduler.api.service.impl;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.QueueService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Queue;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.QueueMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;

import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

/**
 * queue service impl
 */
@Service
public class QueueServiceImpl extends BaseServiceImpl implements QueueService {

    private static final Logger logger = LoggerFactory.getLogger(QueueServiceImpl.class);

    @Autowired
    private QueueMapper queueMapper;

    @Autowired
    private UserMapper userMapper;

    /**
     * query queue list (admin only)
     *
     * @param loginUser login user
     * @return queue list
     */
    @Override
    public Map<String, Object> queryList(User loginUser) {
        Map<String, Object> result = new HashMap<>();
        if (isNotAdmin(loginUser, result)) {
            return result;
        }

        List<Queue> queueList = queueMapper.selectList(null);
        result.put(Constants.DATA_LIST, queueList);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * query queue list paging (admin only)
     *
     * @param loginUser login user
     * @param pageNo    page number
     * @param searchVal search value
     * @param pageSize  page size
     * @return queue list
     */
    @Override
    public Map<String, Object> queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
        Map<String, Object> result = new HashMap<>();
        if (isNotAdmin(loginUser, result)) {
            return result;
        }

        Page<Queue> page = new Page<>(pageNo, pageSize);

        IPage<Queue> queueList = queueMapper.queryQueuePaging(page, searchVal);

        Integer count = (int) queueList.getTotal();
        PageInfo<Queue> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotalCount(count);
        pageInfo.setLists(queueList.getRecords());
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * create queue (admin only); rejects blank values and duplicates of either
     * the queue value or the queue name.
     *
     * @param loginUser login user
     * @param queue     queue value
     * @param queueName queue name
     * @return create result
     */
    @Override
    public Map<String, Object> createQueue(User loginUser, String queue, String queueName) {
        Map<String, Object> result = new HashMap<>();
        if (isNotAdmin(loginUser, result)) {
            return result;
        }

        if (StringUtils.isEmpty(queue)) {
            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE);
            return result;
        }

        if (StringUtils.isEmpty(queueName)) {
            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE_NAME);
            return result;
        }

        if (checkQueueNameExist(queueName)) {
            putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
            return result;
        }

        if (checkQueueExist(queue)) {
            putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
            return result;
        }

        Queue queueObj = new Queue();
        Date now = new Date();

        queueObj.setQueue(queue);
        queueObj.setQueueName(queueName);
        queueObj.setCreateTime(now);
        queueObj.setUpdateTime(now);

        queueMapper.insert(queueObj);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * update queue (admin only); on a queue-name change, also migrates users
     * still referencing the old queue name.
     *
     * @param loginUser login user
     * @param queue     queue value
     * @param id        queue id
     * @param queueName queue name
     * @return update result code
     */
    @Override
    public Map<String, Object> updateQueue(User loginUser, int id, String queue, String queueName) {
        Map<String, Object> result = new HashMap<>();
        if (isNotAdmin(loginUser, result)) {
            return result;
        }

        if (StringUtils.isEmpty(queue)) {
            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE);
            return result;
        }

        if (StringUtils.isEmpty(queueName)) {
            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE_NAME);
            return result;
        }

        Queue queueObj = queueMapper.selectById(id);
        if (queueObj == null) {
            putMsg(result, Status.QUEUE_NOT_EXIST, id);
            return result;
        }

        // whether queue value or queueName is changed
        if (queue.equals(queueObj.getQueue()) && queueName.equals(queueObj.getQueueName())) {
            putMsg(result, Status.NEED_NOT_UPDATE_QUEUE);
            return result;
        }

        // check queue name is exist
        if (!queueName.equals(queueObj.getQueueName())
                && checkQueueNameExist(queueName)) {
            putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
            return result;
        }

        // check queue value is exist
        if (!queue.equals(queueObj.getQueue()) && checkQueueExist(queue)) {
            putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
            return result;
        }

        // check old queue using by any user
        if (checkIfQueueIsInUsing(queueObj.getQueueName(), queueName)) {
            // update user related old queue
            Integer relatedUserNums = userMapper.updateUserQueue(queueObj.getQueueName(), queueName);
            logger.info("old queue have related {} user, exec update user success.", relatedUserNums);
        }

        // update queue
        Date now = new Date();
        queueObj.setQueue(queue);
        queueObj.setQueueName(queueName);
        queueObj.setUpdateTime(now);

        queueMapper.updateById(queueObj);

        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * verify queue and queueName
     *
     * @param queue     queue value
     * @param queueName queue name
     * @return SUCCESS when both are non-blank and neither the queue value nor
     *         the queue name is already in use; a failure status otherwise
     */
    @Override
    public Result<Object> verifyQueue(String queue, String queueName) {
        Result<Object> result = new Result<>();

        if (StringUtils.isEmpty(queue)) {
            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE);
            return result;
        }

        if (StringUtils.isEmpty(queueName)) {
            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE_NAME);
            return result;
        }

        if (checkQueueNameExist(queueName)) {
            putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
            return result;
        }

        if (checkQueueExist(queue)) {
            putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
            return result;
        }

        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * check queue exist
     * if exists return true, not exists return false
     *
     * @param queue queue value
     * @return true if the queue value already exists, otherwise false
     */
    private boolean checkQueueExist(String queue) {
        return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(queue, null));
    }

    /**
     * check queue name exist
     * if exists return true, not exists return false
     *
     * @param queueName queue name
     * @return true if the queue name already exists, otherwise false
     */
    private boolean checkQueueNameExist(String queueName) {
        return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(null, queueName));
    }

    /**
     * check whether the old queue name is in use by any user, meaning those
     * users need to be migrated to the new name.
     *
     * @param oldQueue old queue name
     * @param newQueue new queue name
     * @return true if the name changed and at least one user references the old queue
     */
    private boolean checkIfQueueIsInUsing(String oldQueue, String newQueue) {
        return !oldQueue.equals(newQueue) && CollectionUtils.isNotEmpty(userMapper.queryUserListByQueue(oldQueue));
    }

}
Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import static org.apache.dolphinscheduler.common.Constants.ALIAS; +import static org.apache.dolphinscheduler.common.Constants.CONTENT; +import static org.apache.dolphinscheduler.common.Constants.JAR; + +import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; +import org.apache.dolphinscheduler.api.dto.resources.filter.ResourceFilter; +import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor; +import org.apache.dolphinscheduler.api.dto.resources.visitor.Visitor; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.api.service.ResourcesService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.RegexUtils; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ProgramType; +import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.FileUtils; 
+import org.apache.dolphinscheduler.common.utils.HadoopUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.apache.dolphinscheduler.dao.entity.ResourcesUser; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.UdfFunc; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; +import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; +import org.apache.dolphinscheduler.dao.mapper.TenantMapper; +import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; +import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; + +import org.apache.commons.beanutils.BeanMap; + +import java.io.IOException; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.regex.Matcher; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.dao.DuplicateKeyException; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.multipart.MultipartFile; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.fasterxml.jackson.databind.SerializationFeature; + +/** + * resources service impl + */ +@Service +public 
class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesService {

    // NOTE(review): generic type parameters in this file appear stripped by diff
    // extraction (e.g. Map<Object, Object>, List<Resource>) — confirm against the
    // repository before relying on the raw types shown here.

    private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceImpl.class);

    @Autowired
    private ResourceMapper resourcesMapper;

    @Autowired
    private UdfFuncMapper udfFunctionMapper;

    @Autowired
    private TenantMapper tenantMapper;

    @Autowired
    private UserMapper userMapper;

    @Autowired
    private ResourceUserMapper resourceUserMapper;

    @Autowired
    private ProcessDefinitionMapper processDefinitionMapper;

    /**
     * create directory
     *
     * <p>Verifies upload is enabled and the name/parent are valid, inserts the
     * directory row, then creates the matching directory on HDFS. The HDFS call
     * happens after the insert so the @Transactional rollback can undo the row
     * if directory creation throws.
     *
     * @param loginUser login user
     * @param name alias
     * @param description description
     * @param type type
     * @param pid parent id
     * @param currentDir current directory
     * @return create directory result
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Result createDirectory(User loginUser,
                                  String name,
                                  String description,
                                  ResourceType type,
                                  int pid,
                                  String currentDir) {
        Result result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }
        // root dir already ends with "/", so avoid a double slash
        String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name);
        result = verifyResource(loginUser, type, fullName, pid);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        if (checkResourceExists(fullName, 0, type.ordinal())) {
            logger.error("resource directory {} has exist, can't recreate", fullName);
            putMsg(result, Status.RESOURCE_EXIST);
            return result;
        }

        Date now = new Date();

        // size 0 and isDirectory=true mark this row as a directory entry
        Resource resource = new Resource(pid,name,fullName,true,description,name,loginUser.getId(),type,0,now,now);

        try {
            resourcesMapper.insert(resource);
            putMsg(result, Status.SUCCESS);
            // expose the inserted entity as a plain map, dropping the bean "class" key
            Map dataMap = new BeanMap(resource);
            Map resultMap = new HashMap<>();
            for (Map.Entry entry: dataMap.entrySet()) {
                if (!"class".equalsIgnoreCase(entry.getKey().toString())) {
                    resultMap.put(entry.getKey().toString(), entry.getValue());
                }
            }
            result.setData(resultMap);
        } catch (DuplicateKeyException e) {
            // race with a concurrent create: the DB unique key is the authority
            logger.error("resource directory {} has exist, can't recreate", fullName);
            putMsg(result, Status.RESOURCE_EXIST);
            return result;
        } catch (Exception e) {
            logger.error("resource already exists, can't recreate ", e);
            throw new ServiceException("resource already exists, can't recreate");
        }
        //create directory in hdfs
        createDirectory(loginUser,fullName,type,result);
        return result;
    }

    /**
     * create resource
     *
     * <p>Validates parent, file and name, inserts the resource row, then uploads
     * the file to HDFS. A failed upload throws so the transaction rolls the row
     * back.
     *
     * @param loginUser login user
     * @param name alias
     * @param desc description
     * @param file file
     * @param type type
     * @param pid parent id
     * @param currentDir current directory
     * @return create result code
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Result createResource(User loginUser,
                                 String name,
                                 String desc,
                                 ResourceType type,
                                 MultipartFile file,
                                 int pid,
                                 String currentDir) {
        Result result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        result = verifyPid(loginUser, pid);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        result = verifyFile(name, type, file);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        // check resource name exists
        String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name);
        if (checkResourceExists(fullName, 0, type.ordinal())) {
            // escapeNRT strips newlines/tabs to prevent log forging from user input
            logger.error("resource {} has exist, can't recreate", RegexUtils.escapeNRT(name));
            putMsg(result, Status.RESOURCE_EXIST);
            return result;
        }

        Date now = new Date();
        Resource resource = new Resource(pid,name,fullName,false,desc,file.getOriginalFilename(),loginUser.getId(),type,file.getSize(),now,now);

        try {
            resourcesMapper.insert(resource);
            putMsg(result, Status.SUCCESS);
            Map dataMap = new BeanMap(resource);
            Map resultMap = new HashMap<>();
            for (Map.Entry entry: dataMap.entrySet()) {
                if (!"class".equalsIgnoreCase(entry.getKey().toString())) {
                    resultMap.put(entry.getKey().toString(), entry.getValue());
                }
            }
            result.setData(resultMap);
        } catch (Exception e) {
            logger.error("resource already exists, can't recreate ", e);
            throw new ServiceException("resource already exists, can't recreate");
        }

        // fail upload
        if (!upload(loginUser, fullName, file, type)) {
            logger.error("upload resource: {} file: {} failed.", RegexUtils.escapeNRT(name), RegexUtils.escapeNRT(file.getOriginalFilename()));
            putMsg(result, Status.HDFS_OPERATION_ERROR);
            // throwing triggers rollback of the insert above
            throw new ServiceException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename()));
        }
        return result;
    }
    /**
     * check resource is exists
     *
     * @param fullName fullName
     * @param userId user id (0 = any owner)
     * @param type type ordinal of {@link ResourceType}
     * @return true if resource exists
     */
    private boolean checkResourceExists(String fullName, int userId, int type) {
        List resources = resourcesMapper.queryResourceList(fullName, userId, type);
        return resources != null && !resources.isEmpty();
    }

    /**
     * update resource
     *
     * <p>Renames and/or re-describes a resource, optionally replacing its file.
     * Ordering matters: DB rows (including children and bound UDF functions) are
     * updated first inside the transaction, then the HDFS file is uploaded or
     * copied; HDFS failures throw so the DB changes roll back.
     *
     * @param loginUser login user
     * @param resourceId resource id
     * @param name name
     * @param desc description
     * @param type resource type
     * @param file resource file (null = metadata-only update)
     * @return update result code
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Result updateResource(User loginUser,
                                 int resourceId,
                                 String name,
                                 String desc,
                                 ResourceType type,
                                 MultipartFile file) {
        Result result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        Resource resource = resourcesMapper.selectById(resourceId);
        if (resource == null) {
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }
        if (!hasPerm(loginUser, resource.getUserId())) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        // nothing changed — succeed without touching DB or HDFS
        if (file == null && name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) {
            putMsg(result, Status.SUCCESS);
            return result;
        }

        //check resource already exists
        String originFullName = resource.getFullName();
        String originResourceName = resource.getAlias();

        // new full name = parent path of the original + new alias
        String fullName = String.format("%s%s",originFullName.substring(0,originFullName.lastIndexOf("/") + 1),name);
        if (!originResourceName.equals(name) && checkResourceExists(fullName, 0, type.ordinal())) {
            logger.error("resource {} already exists, can't recreate", name);
            putMsg(result, Status.RESOURCE_EXIST);
            return result;
        }

        result = verifyFile(name, type, file);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        // query tenant by user id
        String tenantCode = getTenantCode(resource.getUserId(),result);
        if (StringUtils.isEmpty(tenantCode)) {
            return result;
        }
        // verify whether the resource exists in storage
        // get the path of origin file in storage
        String originHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,originFullName);
        try {
            if (!HadoopUtils.getInstance().exists(originHdfsFileName)) {
                logger.error("{} not exist", originHdfsFileName);
                putMsg(result,Status.RESOURCE_NOT_EXIST);
                return result;
            }
        } catch (IOException e) {
            logger.error(e.getMessage(),e);
            throw new ServiceException(Status.HDFS_OPERATION_ERROR);
        }

        if (!resource.isDirectory()) {
            //get the origin file suffix
            String originSuffix = FileUtils.suffix(originFullName);
            String suffix = FileUtils.suffix(fullName);
            boolean suffixIsChanged = false;
            if (StringUtils.isBlank(suffix) && StringUtils.isNotBlank(originSuffix)) {
                suffixIsChanged = true;
            }
            if (StringUtils.isNotBlank(suffix) && !suffix.equals(originSuffix)) {
                suffixIsChanged = true;
            }
            //verify whether suffix is changed
            if (suffixIsChanged) {
                //need verify whether this resource is authorized to other users
                // changing the suffix of a shared resource could break consumers
                Map columnMap = new HashMap<>();
                columnMap.put("resources_id", resourceId);

                List resourcesUsers = resourceUserMapper.selectByMap(columnMap);
                if (CollectionUtils.isNotEmpty(resourcesUsers)) {
                    List userIds = resourcesUsers.stream().map(ResourcesUser::getUserId).collect(Collectors.toList());
                    List users = userMapper.selectBatchIds(userIds);
                    String userNames = users.stream().map(User::getUserName).collect(Collectors.toList()).toString();
                    logger.error("resource is authorized to user {},suffix not allowed to be modified", userNames);
                    putMsg(result,Status.RESOURCE_IS_AUTHORIZED,userNames);
                    return result;
                }
            }
        }

        // updateResource data
        Date now = new Date();

        resource.setAlias(name);
        resource.setFullName(fullName);
        resource.setDescription(desc);
        resource.setUpdateTime(now);
        if (file != null) {
            resource.setFileName(file.getOriginalFilename());
            resource.setSize(file.getSize());
        }

        try {
            resourcesMapper.updateById(resource);
            if (resource.isDirectory()) {
                // renaming a directory: rewrite full names of every descendant
                List childrenResource = listAllChildren(resource,false);
                if (CollectionUtils.isNotEmpty(childrenResource)) {
                    // quoteReplacement protects "$"/"\" in the name from regex replacement semantics
                    String matcherFullName = Matcher.quoteReplacement(fullName);
                    List childResourceList;
                    Integer[] childResIdArray = childrenResource.toArray(new Integer[childrenResource.size()]);
                    List resourceList = resourcesMapper.listResourceByIds(childResIdArray);
                    childResourceList = resourceList.stream().map(t -> {
                        t.setFullName(t.getFullName().replaceFirst(originFullName, matcherFullName));
                        t.setUpdateTime(now);
                        return t;
                    }).collect(Collectors.toList());
                    resourcesMapper.batchUpdateResource(childResourceList);

                    if (ResourceType.UDF.equals(resource.getType())) {
                        // keep UDF function definitions pointing at the renamed paths
                        List udfFuncs = udfFunctionMapper.listUdfByResourceId(childResIdArray);
                        if (CollectionUtils.isNotEmpty(udfFuncs)) {
                            udfFuncs = udfFuncs.stream().map(t -> {
                                t.setResourceName(t.getResourceName().replaceFirst(originFullName, matcherFullName));
                                t.setUpdateTime(now);
                                return t;
                            }).collect(Collectors.toList());
                            udfFunctionMapper.batchUpdateUdfFunc(udfFuncs);
                        }
                    }
                }
            } else if (ResourceType.UDF.equals(resource.getType())) {
                // single UDF jar renamed: repoint functions bound to this resource
                List udfFuncs = udfFunctionMapper.listUdfByResourceId(new Integer[]{resourceId});
                if (CollectionUtils.isNotEmpty(udfFuncs)) {
                    udfFuncs = udfFuncs.stream().map(t -> {
                        t.setResourceName(fullName);
                        t.setUpdateTime(now);
                        return t;
                    }).collect(Collectors.toList());
                    udfFunctionMapper.batchUpdateUdfFunc(udfFuncs);
                }

            }

            putMsg(result, Status.SUCCESS);
            Map dataMap = new BeanMap(resource);
            Map resultMap = new HashMap<>();
            for (Map.Entry entry: dataMap.entrySet()) {
                if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) {
                    resultMap.put(entry.getKey().toString(), entry.getValue());
                }
            }
            result.setData(resultMap);
        } catch (Exception e) {
            logger.error(Status.UPDATE_RESOURCE_ERROR.getMsg(), e);
            throw new ServiceException(Status.UPDATE_RESOURCE_ERROR);
        }

        // if name unchanged, return directly without moving on HDFS
        if (originResourceName.equals(name) && file == null) {
            return result;
        }

        if (file != null) {
            // fail upload
            if (!upload(loginUser, fullName, file, type)) {
                logger.error("upload resource: {} file: {} failed.", name, RegexUtils.escapeNRT(file.getOriginalFilename()));
                putMsg(result, Status.HDFS_OPERATION_ERROR);
                throw new ServiceException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename()));
            }
            // new content uploaded under the new name: remove the stale original
            if (!fullName.equals(originFullName)) {
                try {
                    HadoopUtils.getInstance().delete(originHdfsFileName,false);
                } catch (IOException e) {
                    logger.error(e.getMessage(),e);
                    throw new ServiceException(String.format("delete resource: %s failed.", originFullName));
                }
            }
            return result;
        }

        // get the path of dest file in hdfs
        String destHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,fullName);

        try {
            logger.info("start hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName);
            // rename-only path: copy with deleteSource=true moves the file on HDFS
            HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true);
        } catch (Exception e) {
            logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e);
            putMsg(result,Status.HDFS_COPY_FAIL);
            throw new ServiceException(Status.HDFS_COPY_FAIL);
        }

        return result;
    }
    /**
     * Validate an uploaded file against the target resource name.
     *
     * <p>Checks: non-empty, suffix matches the resource name's suffix, UDF
     * resources must be .jar, and size is under the configured limit. A null
     * file is accepted (metadata-only updates).
     *
     * @param name target resource name (alias)
     * @param type resource type
     * @param file uploaded file, may be null
     * @return SUCCESS result, or the first failed validation status
     */
    private Result verifyFile(String name, ResourceType type, MultipartFile file) {
        Result result = new Result<>();
        putMsg(result, Status.SUCCESS);
        if (file != null) {
            // file is empty
            if (file.isEmpty()) {
                logger.error("file is empty: {}", RegexUtils.escapeNRT(file.getOriginalFilename()));
                putMsg(result, Status.RESOURCE_FILE_IS_EMPTY);
                return result;
            }

            // file suffix
            String fileSuffix = FileUtils.suffix(file.getOriginalFilename());
            String nameSuffix = FileUtils.suffix(name);

            // determine file suffix
            if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) {
                // rename file suffix and original suffix must be consistent
                logger.error("rename file suffix and original suffix must be consistent: {}", RegexUtils.escapeNRT(file.getOriginalFilename()));
                putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE);
                return result;
            }

            //If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar
            if (Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(fileSuffix)) {
                logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg());
                putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR);
                return result;
            }
            if (file.getSize() > Constants.MAX_FILE_SIZE) {
                logger.error("file size is too large: {}", RegexUtils.escapeNRT(file.getOriginalFilename()));
                putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT);
                return result;
            }
        }
        return result;
    }

    /**
     * query resources list paging
     *
     * @param loginUser login user
     * @param directoryId directory id to list, -1 for the root
     * @param type resource type
     * @param searchVal search value
     * @param pageNo page number
     * @param pageSize page size
     * @return resource list page
     */
    @Override
    public Map queryResourceListPaging(User loginUser, int directoryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) {

        HashMap result = new HashMap<>();
        Page page = new Page<>(pageNo, pageSize);
        int userId = loginUser.getId();
        // admins see all resources: userId=0 disables the owner filter in the query
        if (isAdmin(loginUser)) {
            userId = 0;
        }
        if (directoryId != -1) {
            Resource directory = resourcesMapper.selectById(directoryId);
            if (directory == null) {
                putMsg(result, Status.RESOURCE_NOT_EXIST);
                return result;
            }
        }

        IPage resourceIPage = resourcesMapper.queryResourcePaging(page,
                userId,directoryId, type.ordinal(), searchVal);
        PageInfo pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotalCount((int)resourceIPage.getTotal());
        pageInfo.setLists(resourceIPage.getRecords());
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result,Status.SUCCESS);
        return result;
    }

    /**
     * create directory on hdfs
     *
     * <p>Creates the tenant root dir if missing, then the directory itself.
     * Any failure throws ServiceException so the caller's transaction rolls back.
     *
     * @param loginUser login user
     * @param fullName full name
     * @param type resource type
     * @param result Result mutated with HDFS_OPERATION_ERROR on failure
     */
    private void createDirectory(User loginUser,String fullName,ResourceType type,Result result) {
        String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
        String directoryName = HadoopUtils.getHdfsFileName(type,tenantCode,fullName);
        String resourceRootPath = HadoopUtils.getHdfsDir(type,tenantCode);
        try {
            if (!HadoopUtils.getInstance().exists(resourceRootPath)) {
                createTenantDirIfNotExists(tenantCode);
            }

            if (!HadoopUtils.getInstance().mkdir(directoryName)) {
                logger.error("create resource directory {} of hdfs failed",directoryName);
                putMsg(result,Status.HDFS_OPERATION_ERROR);
                throw new ServiceException(String.format("create resource directory: %s failed.", directoryName));
            }
        } catch (Exception e) {
            // NOTE(review): the underlying exception is not attached to this log line — consider logging e
            logger.error("create resource directory {} of hdfs failed",directoryName);
            putMsg(result,Status.HDFS_OPERATION_ERROR);
            throw new ServiceException(String.format("create resource directory: %s failed.", directoryName));
        }
    }

    /**
     * upload file to hdfs
     *
     * <p>Stages the multipart upload to a random local file name, then copies
     * it to HDFS (deleting the local copy). Returns false rather than throwing
     * on failure; callers decide whether to roll back.
     *
     * @param loginUser login user
     * @param fullName full name
     * @param file file
     * @param type resource type
     * @return true if the upload reached HDFS
     */
    private boolean upload(User loginUser, String fullName, MultipartFile file, ResourceType type) {
        // save to local
        String fileSuffix = FileUtils.suffix(file.getOriginalFilename());
        String nameSuffix = FileUtils.suffix(fullName);

        // determine file suffix
        if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) {
            return false;
        }
        // query tenant
        String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
        // random file name
        String localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString());

        // save file to hdfs, and delete original file
        String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName);
        String resourcePath = HadoopUtils.getHdfsDir(type,tenantCode);
        try {
            // if tenant dir not exists
            if (!HadoopUtils.getInstance().exists(resourcePath)) {
                createTenantDirIfNotExists(tenantCode);
            }
            org.apache.dolphinscheduler.api.utils.FileUtils.copyFile(file, localFilename);
            // deleteSource=true removes the staged local file after the copy
            HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            return false;
        }
        return true;
    }

    /**
     * query resource list
     *
     * @param loginUser login user
     * @param type resource type
     * @return resource list rendered as a tree (children of the virtual root)
     */
    @Override
    public Map queryResourceList(User loginUser, ResourceType type) {
        Map result = new HashMap<>();

        int userId = loginUser.getId();
        if (isAdmin(loginUser)) {
            userId = 0;
        }
        List allResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal(),0);
        Visitor resourceTreeVisitor = new ResourceTreeVisitor(allResourceList);
        result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren());
        putMsg(result,Status.SUCCESS);

        return result;
    }

    /**
     * query resource list by program type
     *
     * <p>Filters the authored resource tree by suffix: ".jar" for JAVA/SCALA
     * (and when programType is null), ".py" for PYTHON.
     *
     * @param loginUser login user
     * @param type resource type
     * @param programType program type, may be null
     * @return resource list
     */
    @Override
    public Map queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType) {
        Map result = new HashMap<>();
        String suffix = ".jar";
        int userId = loginUser.getId();
        if (isAdmin(loginUser)) {
            userId = 0;
        }
        if (programType != null) {
            switch (programType) {
                case JAVA:
                case SCALA:
                    // keep the ".jar" default
                    break;
                case PYTHON:
                    suffix = ".py";
                    break;
                default:
            }
        }
        List allResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal(),0);
        List resources = new ResourceFilter(suffix,new ArrayList<>(allResourceList)).filter();
        Visitor resourceTreeVisitor = new ResourceTreeVisitor(resources);
        result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren());
        putMsg(result,Status.SUCCESS);

        return result;
    }
    /**
     * delete resource
     *
     * <p>Refuses to delete resources (or any of their children) that are bound
     * to UDF functions or referenced by released process definitions. Deletes
     * the DB rows first, then the HDFS file tree; an HDFS failure propagates as
     * IOException and rolls the transaction back.
     *
     * @param loginUser login user
     * @param resourceId resource id
     * @return delete result code
     * @throws IOException exception
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Result delete(User loginUser, int resourceId) throws IOException {
        Result result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        // get resource by id
        Resource resource = resourcesMapper.selectById(resourceId);
        if (resource == null) {
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }
        if (!hasPerm(loginUser, resource.getUserId())) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        String tenantCode = getTenantCode(resource.getUserId(),result);
        if (StringUtils.isEmpty(tenantCode)) {
            return result;
        }

        // get all resource id of process definitions those is released
        // NOTE(review): raw types below look like stripped generics (List<Map<String, Object>>, Map<Integer, Set<Integer>>) — confirm against repository
        List> list = processDefinitionMapper.listResources();
        Map> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list);
        Set resourceIdSet = resourceProcessMap.keySet();
        // get all children of the resource
        List allChildren = listAllChildren(resource,true);
        Integer[] needDeleteResourceIdArray = allChildren.toArray(new Integer[allChildren.size()]);

        //if resource type is UDF,need check whether it is bound by UDF function
        if (resource.getType() == (ResourceType.UDF)) {
            List udfFuncs = udfFunctionMapper.listUdfByResourceId(needDeleteResourceIdArray);
            if (CollectionUtils.isNotEmpty(udfFuncs)) {
                logger.error("can't be deleted,because it is bound by UDF functions:{}", udfFuncs);
                putMsg(result,Status.UDF_RESOURCE_IS_BOUND,udfFuncs.get(0).getFuncName());
                return result;
            }
        }

        if (resourceIdSet.contains(resource.getPid())) {
            logger.error("can't be deleted,because it is used of process definition");
            putMsg(result, Status.RESOURCE_IS_USED);
            return result;
        }
        // intersect the referenced-resource set with this resource's subtree
        resourceIdSet.retainAll(allChildren);
        if (CollectionUtils.isNotEmpty(resourceIdSet)) {
            logger.error("can't be deleted,because it is used of process definition");
            for (Integer resId : resourceIdSet) {
                logger.error("resource id:{} is used of process definition {}",resId,resourceProcessMap.get(resId));
            }
            putMsg(result, Status.RESOURCE_IS_USED);
            return result;
        }

        // get hdfs file by type
        String hdfsFilename = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName());

        //delete data in database
        resourcesMapper.deleteIds(needDeleteResourceIdArray);
        resourceUserMapper.deleteResourceUserArray(0, needDeleteResourceIdArray);

        //delete file on hdfs (recursive=true removes directory subtrees)
        HadoopUtils.getInstance().delete(hdfsFilename, true);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * verify resource by name and type
     *
     * <p>Checks both the database and the tenant's HDFS space, since a file may
     * exist on HDFS without a DB row (or vice versa).
     *
     * @param fullName resource full name
     * @param type resource type
     * @param loginUser login user
     * @return SUCCESS if the resource name does not exist, otherwise an error status
     */
    @Override
    public Result verifyResourceName(String fullName, ResourceType type, User loginUser) {
        Result result = new Result<>();
        putMsg(result, Status.SUCCESS);
        if (checkResourceExists(fullName, 0, type.ordinal())) {
            logger.error("resource type:{} name:{} has exist, can't create again.", type, RegexUtils.escapeNRT(fullName));
            putMsg(result, Status.RESOURCE_EXIST);
        } else {
            // query tenant
            Tenant tenant = tenantMapper.queryById(loginUser.getTenantId());
            if (tenant != null) {
                String tenantCode = tenant.getTenantCode();

                try {
                    String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName);
                    if (HadoopUtils.getInstance().exists(hdfsFilename)) {
                        logger.error("resource type:{} name:{} has exist in hdfs {}, can't create again.", type, RegexUtils.escapeNRT(fullName), hdfsFilename);
                        putMsg(result, Status.RESOURCE_FILE_EXIST,hdfsFilename);
                    }

                } catch (Exception e) {
                    logger.error(e.getMessage(),e);
                    putMsg(result,Status.HDFS_OPERATION_ERROR);
                }
            } else {
                putMsg(result,Status.TENANT_NOT_EXIST);
            }
        }

        return result;
    }
    /**
     * verify resource by full name or pid and type
     *
     * <p>When fullName is given, returns the first matching resource; otherwise
     * looks up the resource by id and returns its parent.
     *
     * @param fullName resource full name, may be blank
     * @param id resource id, may be null
     * @param type resource type
     * @return the matched resource (or its parent), or an error status
     */
    @Override
    public Result queryResource(String fullName, Integer id, ResourceType type) {
        Result result = new Result<>();
        // at least one of the two lookup keys is required
        if (StringUtils.isBlank(fullName) && id == null) {
            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
            return result;
        }
        if (StringUtils.isNotBlank(fullName)) {
            List resourceList = resourcesMapper.queryResource(fullName,type.ordinal());
            if (CollectionUtils.isEmpty(resourceList)) {
                putMsg(result, Status.RESOURCE_NOT_EXIST);
                return result;
            }
            putMsg(result, Status.SUCCESS);
            result.setData(resourceList.get(0));
        } else {
            Resource resource = resourcesMapper.selectById(id);
            if (resource == null) {
                putMsg(result, Status.RESOURCE_NOT_EXIST);
                return result;
            }
            Resource parentResource = resourcesMapper.selectById(resource.getPid());
            if (parentResource == null) {
                putMsg(result, Status.RESOURCE_NOT_EXIST);
                return result;
            }
            putMsg(result, Status.SUCCESS);
            result.setData(parentResource);
        }
        return result;
    }

    /**
     * view resource file online
     *
     * <p>Only suffixes listed in the configured view whitelist may be read.
     * Content is fetched straight from HDFS with skip/limit paging.
     *
     * @param resourceId resource id
     * @param skipLineNum skip line number
     * @param limit limit
     * @return resource content
     */
    @Override
    public Result readResource(int resourceId, int skipLineNum, int limit) {
        Result result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        // get resource by id
        Resource resource = resourcesMapper.selectById(resourceId);
        if (resource == null) {
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }
        //check preview or not by file suffix
        String nameSuffix = FileUtils.suffix(resource.getAlias());
        String resourceViewSuffixs = FileUtils.getResourceViewSuffixs();
        if (StringUtils.isNotEmpty(resourceViewSuffixs)) {
            List strList = Arrays.asList(resourceViewSuffixs.split(","));
            if (!strList.contains(nameSuffix)) {
                logger.error("resource suffix {} not support view, resource id {}", nameSuffix, resourceId);
                putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
                return result;
            }
        }

        String tenantCode = getTenantCode(resource.getUserId(),result);
        if (StringUtils.isEmpty(tenantCode)) {
            return result;
        }

        // hdfs path
        String hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resource.getFullName());
        logger.info("resource hdfs path is {}", hdfsFileName);
        try {
            if (HadoopUtils.getInstance().exists(hdfsFileName)) {
                List content = HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit);

                putMsg(result, Status.SUCCESS);
                Map map = new HashMap<>();
                map.put(ALIAS, resource.getAlias());
                map.put(CONTENT, String.join("\n", content));
                result.setData(map);
            } else {
                logger.error("read file {} not exist in hdfs", hdfsFileName);
                putMsg(result, Status.RESOURCE_FILE_NOT_EXIST,hdfsFileName);
            }

        } catch (Exception e) {
            logger.error("Resource {} read failed", hdfsFileName, e);
            putMsg(result, Status.HDFS_OPERATION_ERROR);
        }

        return result;
    }
    /**
     * create resource file online
     *
     * <p>Builds the file name from fileName + fileSuffix, validates the suffix
     * against the view whitelist, inserts the resource row, then writes the
     * content to HDFS; an upload failure throws to roll back the insert.
     *
     * @param loginUser login user
     * @param type resource type
     * @param fileName file name
     * @param fileSuffix file suffix
     * @param desc description
     * @param content content
     * @param pid pid
     * @param currentDir current directory
     * @return create result code
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content,int pid,String currentDir) {
        Result result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        //check file suffix
        String nameSuffix = fileSuffix.trim();
        String resourceViewSuffixs = FileUtils.getResourceViewSuffixs();
        if (StringUtils.isNotEmpty(resourceViewSuffixs)) {
            List strList = Arrays.asList(resourceViewSuffixs.split(","));
            if (!strList.contains(nameSuffix)) {
                logger.error("resource suffix {} not support create", nameSuffix);
                putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
                return result;
            }
        }

        String name = fileName.trim() + "." + nameSuffix;
        String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name);
        result = verifyResource(loginUser, type, fullName, pid);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        // save data
        Date now = new Date();
        // NOTE(review): content.getBytes() uses the platform default charset — size may differ from the UTF-8 bytes written later; verify intended
        Resource resource = new Resource(pid,name,fullName,false,desc,name,loginUser.getId(),type,content.getBytes().length,now,now);

        resourcesMapper.insert(resource);

        putMsg(result, Status.SUCCESS);
        Map dataMap = new BeanMap(resource);
        Map resultMap = new HashMap<>();
        for (Map.Entry entry: dataMap.entrySet()) {
            if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) {
                resultMap.put(entry.getKey().toString(), entry.getValue());
            }
        }
        result.setData(resultMap);

        String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();

        result = uploadContentToHdfs(fullName, tenantCode, content);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            // rollback of the insert above via the transaction
            throw new ServiceException(result.getMsg());
        }
        return result;
    }

    /**
     * Check whether resource upload is enabled in the configuration.
     *
     * @return SUCCESS when enabled, HDFS_NOT_STARTUP otherwise
     */
    private Result checkResourceUploadStartupState() {
        Result result = new Result<>();
        putMsg(result, Status.SUCCESS);
        // if resource upload startup
        if (!PropertyUtils.getResUploadStartupState()) {
            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
            putMsg(result, Status.HDFS_NOT_STARTUP);
            return result;
        }
        return result;
    }

    /**
     * Verify the resource name is free and the parent is valid/owned.
     *
     * @param loginUser login user
     * @param type resource type
     * @param fullName resource full name
     * @param pid parent id
     * @return SUCCESS, or the first failed check's status
     */
    private Result verifyResource(User loginUser, ResourceType type, String fullName, int pid) {
        Result result = verifyResourceName(fullName, type, loginUser);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }
        return verifyPid(loginUser, pid);
    }

    /**
     * Verify the parent resource exists and the user may operate on it.
     *
     * @param loginUser login user
     * @param pid parent id, -1 means root (no parent check)
     * @return SUCCESS, PARENT_RESOURCE_NOT_EXIST or USER_NO_OPERATION_PERM
     */
    private Result verifyPid(User loginUser, int pid) {
        Result result = new Result<>();
        putMsg(result, Status.SUCCESS);
        if (pid != -1) {
            Resource parentResource = resourcesMapper.selectById(pid);
            if (parentResource == null) {
                putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST);
                return result;
            }
            if (!hasPerm(loginUser, parentResource.getUserId())) {
                putMsg(result, Status.USER_NO_OPERATION_PERM);
                return result;
            }
        }
        return result;
    }

    /**
     * updateProcessInstance resource
     *
     * <p>Replaces the content of an online-editable resource: updates size and
     * timestamp in DB, then rewrites the HDFS file; an upload failure throws to
     * roll back the DB update.
     *
     * @param resourceId resource id
     * @param content content
     * @return update result cod
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Result updateResourceContent(int resourceId, String content) {
        Result result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }

        Resource resource = resourcesMapper.selectById(resourceId);
        if (resource == null) {
            logger.error("read file not exist, resource id {}", resourceId);
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }
        //check can edit by file suffix
        String nameSuffix = FileUtils.suffix(resource.getAlias());
        String resourceViewSuffixs = FileUtils.getResourceViewSuffixs();
        if (StringUtils.isNotEmpty(resourceViewSuffixs)) {
            List strList = Arrays.asList(resourceViewSuffixs.split(","));
            if (!strList.contains(nameSuffix)) {
                logger.error("resource suffix {} not support updateProcessInstance, resource id {}", nameSuffix, resourceId);
                putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
                return result;
            }
        }

        String tenantCode = getTenantCode(resource.getUserId(),result);
        if (StringUtils.isEmpty(tenantCode)) {
            return result;
        }
        resource.setSize(content.getBytes().length);
        resource.setUpdateTime(new Date());
        resourcesMapper.updateById(resource);

        result = uploadContentToHdfs(resource.getFullName(), tenantCode, content);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            throw new ServiceException(result.getMsg());
        }
        return result;
    }

    /**
     * Write text content to the resource's HDFS location.
     *
     * <p>Stages the content to a random local file, ensures the tenant dir
     * exists, deletes any previous HDFS file, then copies the staged file up
     * (removing the local copy).
     *
     * @param resourceName resource name
     * @param tenantCode tenant code
     * @param content content
     * @return result
     */
    private Result uploadContentToHdfs(String resourceName, String tenantCode, String content) {
        Result result = new Result<>();
        String localFilename = "";
        String hdfsFileName = "";
        try {
            localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString());

            if (!FileUtils.writeContent2File(content, localFilename)) {
                // write file fail
                logger.error("file {} fail, content is {}", localFilename, RegexUtils.escapeNRT(content));
                putMsg(result, Status.RESOURCE_NOT_EXIST);
                return result;
            }

            // get resource file hdfs path
            hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resourceName);
            String resourcePath = HadoopUtils.getHdfsResDir(tenantCode);
            logger.info("resource hdfs path is {}, resource dir is {}", hdfsFileName, resourcePath);

            HadoopUtils hadoopUtils = HadoopUtils.getInstance();
            if (!hadoopUtils.exists(resourcePath)) {
                // create if tenant dir not exists
                createTenantDirIfNotExists(tenantCode);
            }
            if (hadoopUtils.exists(hdfsFileName)) {
                // overwrite: remove the stale file first
                hadoopUtils.delete(hdfsFileName, false);
            }

            hadoopUtils.copyLocalToHdfs(localFilename, hdfsFileName, true, true);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            result.setCode(Status.HDFS_OPERATION_ERROR.getCode());
            result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, hdfsFileName));
            return result;
        }
        putMsg(result, Status.SUCCESS);
        return result;
    }
org.springframework.core.io.Resource downloadResource(int resourceId) throws IOException { + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()) { + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + throw new ServiceException("hdfs not startup"); + } + + Resource resource = resourcesMapper.selectById(resourceId); + if (resource == null) { + logger.error("download file not exist, resource id {}", resourceId); + return null; + } + if (resource.isDirectory()) { + logger.error("resource id {} is directory,can't download it", resourceId); + throw new ServiceException("can't download directory"); + } + + int userId = resource.getUserId(); + User user = userMapper.selectById(userId); + if (user == null) { + logger.error("user id {} not exists", userId); + throw new ServiceException(String.format("resource owner id %d not exist",userId)); + } + + Tenant tenant = tenantMapper.queryById(user.getTenantId()); + if (tenant == null) { + logger.error("tenant id {} not exists", user.getTenantId()); + throw new ServiceException(String.format("The tenant id %d of resource owner not exist",user.getTenantId())); + } + + String tenantCode = tenant.getTenantCode(); + + String hdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName()); + + String localFileName = FileUtils.getDownloadFilename(resource.getAlias()); + logger.info("resource hdfs path is {}, download local filename is {}", hdfsFileName, localFileName); + + HadoopUtils.getInstance().copyHdfsToLocal(hdfsFileName, localFileName, false, true); + return org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName); + } + + /** + * list all file + * + * @param loginUser login user + * @param userId user id + * @return unauthorized result code + */ + @Override + public Map authorizeResourceTree(User loginUser, Integer userId) { + + Map result = new HashMap<>(); + if (isNotAdmin(loginUser, result)) { + return 
result; + } + List resourceList = resourcesMapper.queryResourceExceptUserId(userId); + List list; + if (CollectionUtils.isNotEmpty(resourceList)) { + Visitor visitor = new ResourceTreeVisitor(resourceList); + list = visitor.visit().getChildren(); + } else { + list = new ArrayList<>(0); + } + + result.put(Constants.DATA_LIST, list); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * unauthorized file + * + * @param loginUser login user + * @param userId user id + * @return unauthorized result code + */ + @Override + public Map unauthorizedFile(User loginUser, Integer userId) { + + Map result = new HashMap<>(); + if (isNotAdmin(loginUser, result)) { + return result; + } + List resourceList = resourcesMapper.queryResourceExceptUserId(userId); + List list; + if (resourceList != null && !resourceList.isEmpty()) { + Set resourceSet = new HashSet<>(resourceList); + List authedResourceList = resourcesMapper.queryAuthorizedResourceList(userId); + + getAuthorizedResourceList(resourceSet, authedResourceList); + list = new ArrayList<>(resourceSet); + } else { + list = new ArrayList<>(0); + } + Visitor visitor = new ResourceTreeVisitor(list); + result.put(Constants.DATA_LIST, visitor.visit().getChildren()); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * unauthorized udf function + * + * @param loginUser login user + * @param userId user id + * @return unauthorized result code + */ + @Override + public Map unauthorizedUDFFunction(User loginUser, Integer userId) { + Map result = new HashMap<>(); + //only admin can operate + if (isNotAdmin(loginUser, result)) { + return result; + } + + List udfFuncList = udfFunctionMapper.queryUdfFuncExceptUserId(userId); + List resultList = new ArrayList<>(); + Set udfFuncSet; + if (CollectionUtils.isNotEmpty(udfFuncList)) { + udfFuncSet = new HashSet<>(udfFuncList); + + List authedUDFFuncList = udfFunctionMapper.queryAuthedUdfFunc(userId); + + getAuthorizedResourceList(udfFuncSet, authedUDFFuncList); + 
resultList = new ArrayList<>(udfFuncSet); + } + result.put(Constants.DATA_LIST, resultList); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * authorized udf function + * + * @param loginUser login user + * @param userId user id + * @return authorized result code + */ + @Override + public Map authorizedUDFFunction(User loginUser, Integer userId) { + Map result = new HashMap<>(); + if (isNotAdmin(loginUser, result)) { + return result; + } + List udfFuncs = udfFunctionMapper.queryAuthedUdfFunc(userId); + result.put(Constants.DATA_LIST, udfFuncs); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * authorized file + * + * @param loginUser login user + * @param userId user id + * @return authorized result + */ + @Override + public Map authorizedFile(User loginUser, Integer userId) { + Map result = new HashMap<>(); + if (isNotAdmin(loginUser, result)) { + return result; + } + List authedResources = resourcesMapper.queryAuthorizedResourceList(userId); + Visitor visitor = new ResourceTreeVisitor(authedResources); + String visit = JSONUtils.toJsonString(visitor.visit(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); + logger.info(visit); + String jsonTreeStr = JSONUtils.toJsonString(visitor.visit().getChildren(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); + logger.info(jsonTreeStr); + result.put(Constants.DATA_LIST, visitor.visit().getChildren()); + putMsg(result,Status.SUCCESS); + return result; + } + + /** + * get authorized resource list + * + * @param resourceSet resource set + * @param authedResourceList authorized resource list + */ + private void getAuthorizedResourceList(Set resourceSet, List authedResourceList) { + Set authedResourceSet; + if (CollectionUtils.isNotEmpty(authedResourceList)) { + authedResourceSet = new HashSet<>(authedResourceList); + resourceSet.removeAll(authedResourceSet); + } + } + + /** + * get tenantCode by UserId + * + * @param userId user id + * @param result return result + * @return tenant code + */ 
+ private String getTenantCode(int userId,Result result) { + User user = userMapper.selectById(userId); + if (user == null) { + logger.error("user {} not exists", userId); + putMsg(result, Status.USER_NOT_EXIST,userId); + return null; + } + + Tenant tenant = tenantMapper.queryById(user.getTenantId()); + if (tenant == null) { + logger.error("tenant not exists"); + putMsg(result, Status.TENANT_NOT_EXIST); + return null; + } + return tenant.getTenantCode(); + } + + /** + * list all children id + * @param resource resource + * @param containSelf whether add self to children list + * @return all children id + */ + List listAllChildren(Resource resource,boolean containSelf) { + List childList = new ArrayList<>(); + if (resource.getId() != -1 && containSelf) { + childList.add(resource.getId()); + } + + if (resource.isDirectory()) { + listAllChildren(resource.getId(),childList); + } + return childList; + } + + /** + * list all children id + * @param resourceId resource id + * @param childList child list + */ + void listAllChildren(int resourceId,List childList) { + List children = resourcesMapper.listChildren(resourceId); + for (int childId : children) { + childList.add(childId); + listAllChildren(childId, childList); + } + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java new file mode 100644 index 0000000000..5955a447e9 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java @@ -0,0 +1,607 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.dto.ScheduleParam; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.api.service.ExecutorService; +import org.apache.dolphinscheduler.api.service.MonitorService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.api.service.SchedulerService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.enums.WarningType; +import org.apache.dolphinscheduler.common.model.Server; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.User; +import 
org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob; +import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; +import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; + +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.quartz.CronExpression; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * scheduler service impl + */ +@Service +public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerService { + + private static final Logger logger = LoggerFactory.getLogger(SchedulerServiceImpl.class); + + @Autowired + private ProjectService projectService; + + @Autowired + private ExecutorService executorService; + + @Autowired + private MonitorService monitorService; + + @Autowired + private ProcessService processService; + + @Autowired + private ScheduleMapper scheduleMapper; + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + /** + * save schedule + * + * @param loginUser login user + * @param projectName project name + * @param processDefineId process definition id + * @param schedule scheduler + * @param warningType warning type + * @param warningGroupId warning group id + * @param failureStrategy failure strategy + * @param 
processInstancePriority process instance priority + * @param workerGroup worker group + * @return create result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map insertSchedule(User loginUser, String projectName, + Integer processDefineId, + String schedule, + WarningType warningType, + int warningGroupId, + FailureStrategy failureStrategy, + Priority processInstancePriority, + String workerGroup) { + + Map result = new HashMap<>(); + + Project project = projectMapper.queryByName(projectName); + + // check project auth + boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); + if (!hasProjectAndPerm) { + return result; + } + + // check work flow define release state + ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId); + result = executorService.checkProcessDefinitionValid(processDefinition, processDefineId); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + + Schedule scheduleObj = new Schedule(); + Date now = new Date(); + + scheduleObj.setProjectName(projectName); + scheduleObj.setProcessDefinitionId(processDefinition.getId()); + scheduleObj.setProcessDefinitionName(processDefinition.getName()); + + ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); + if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { + logger.warn("The start time must not be the same as the end"); + putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); + return result; + } + scheduleObj.setStartTime(scheduleParam.getStartTime()); + scheduleObj.setEndTime(scheduleParam.getEndTime()); + if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { + logger.error("{} verify failure", scheduleParam.getCrontab()); + + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab()); + return result; + } + 
scheduleObj.setCrontab(scheduleParam.getCrontab()); + scheduleObj.setWarningType(warningType); + scheduleObj.setWarningGroupId(warningGroupId); + scheduleObj.setFailureStrategy(failureStrategy); + scheduleObj.setCreateTime(now); + scheduleObj.setUpdateTime(now); + scheduleObj.setUserId(loginUser.getId()); + scheduleObj.setUserName(loginUser.getUserName()); + scheduleObj.setReleaseState(ReleaseState.OFFLINE); + scheduleObj.setProcessInstancePriority(processInstancePriority); + scheduleObj.setWorkerGroup(workerGroup); + scheduleMapper.insert(scheduleObj); + + /** + * updateProcessInstance receivers and cc by process definition id + */ + processDefinition.setWarningGroupId(warningGroupId); + processDefinitionMapper.updateById(processDefinition); + + // return scheduler object with ID + result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId())); + putMsg(result, Status.SUCCESS); + + result.put("scheduleId", scheduleObj.getId()); + return result; + } + + /** + * updateProcessInstance schedule + * + * @param loginUser login user + * @param projectName project name + * @param id scheduler id + * @param scheduleExpression scheduler + * @param warningType warning type + * @param warningGroupId warning group id + * @param failureStrategy failure strategy + * @param workerGroup worker group + * @param processInstancePriority process instance priority + * @param scheduleStatus schedule status + * @return update result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map updateSchedule(User loginUser, + String projectName, + Integer id, + String scheduleExpression, + WarningType warningType, + int warningGroupId, + FailureStrategy failureStrategy, + ReleaseState scheduleStatus, + Priority processInstancePriority, + String workerGroup) { + Map result = new HashMap<>(); + + Project project = projectMapper.queryByName(projectName); + + // check project auth + boolean hasProjectAndPerm = 
projectService.hasProjectAndPerm(loginUser, project, result); + if (!hasProjectAndPerm) { + return result; + } + + // check schedule exists + Schedule schedule = scheduleMapper.selectById(id); + + if (schedule == null) { + putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); + return result; + } + + ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId()); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId()); + return result; + } + + /** + * scheduling on-line status forbid modification + */ + if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) { + return result; + } + + Date now = new Date(); + + // updateProcessInstance param + if (StringUtils.isNotEmpty(scheduleExpression)) { + ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); + if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { + logger.warn("The start time must not be the same as the end"); + putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); + return result; + } + schedule.setStartTime(scheduleParam.getStartTime()); + schedule.setEndTime(scheduleParam.getEndTime()); + if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { + putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab()); + return result; + } + schedule.setCrontab(scheduleParam.getCrontab()); + } + + if (warningType != null) { + schedule.setWarningType(warningType); + } + + schedule.setWarningGroupId(warningGroupId); + + if (failureStrategy != null) { + schedule.setFailureStrategy(failureStrategy); + } + + if (scheduleStatus != null) { + schedule.setReleaseState(scheduleStatus); + } + schedule.setWorkerGroup(workerGroup); + schedule.setUpdateTime(now); + schedule.setProcessInstancePriority(processInstancePriority); + 
scheduleMapper.updateById(schedule); + + /** + * updateProcessInstance recipients and cc by process definition ID + */ + processDefinition.setWarningGroupId(warningGroupId); + + processDefinitionMapper.updateById(processDefinition); + + putMsg(result, Status.SUCCESS); + return result; + } + + + /** + * set schedule online or offline + * + * @param loginUser login user + * @param projectName project name + * @param id scheduler id + * @param scheduleStatus schedule status + * @return publish result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map setScheduleState(User loginUser, + String projectName, + Integer id, + ReleaseState scheduleStatus) { + Map result = new HashMap<>(); + + Project project = projectMapper.queryByName(projectName); + // check project auth + boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); + if (!hasProjectAndPerm) { + return result; + } + + // check schedule exists + Schedule scheduleObj = scheduleMapper.selectById(id); + + if (scheduleObj == null) { + putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); + return result; + } + // check schedule release state + if (scheduleObj.getReleaseState() == scheduleStatus) { + logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}", + scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus); + putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus); + return result; + } + ProcessDefinition processDefinition = processService.findProcessDefineById(scheduleObj.getProcessDefinitionId()); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId()); + return result; + } + + if (scheduleStatus == ReleaseState.ONLINE) { + // check process definition release state + if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { + logger.info("not release process 
definition id: {} , name : {}", + processDefinition.getId(), processDefinition.getName()); + putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); + return result; + } + // check sub process definition release state + List subProcessDefineIds = new ArrayList<>(); + processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); + Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]); + if (!subProcessDefineIds.isEmpty()) { + List subProcessDefinitionList = + processDefinitionMapper.queryDefinitionListByIdList(idArray); + if (subProcessDefinitionList != null && !subProcessDefinitionList.isEmpty()) { + for (ProcessDefinition subProcessDefinition : subProcessDefinitionList) { + /** + * if there is no online process, exit directly + */ + if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) { + logger.info("not release process definition id: {} , name : {}", + subProcessDefinition.getId(), subProcessDefinition.getName()); + putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId()); + return result; + } + } + } + } + } + + // check master server exists + List masterServers = monitorService.getServerListFromZK(true); + + if (masterServers.isEmpty()) { + putMsg(result, Status.MASTER_NOT_EXISTS); + return result; + } + + // set status + scheduleObj.setReleaseState(scheduleStatus); + + scheduleMapper.updateById(scheduleObj); + + try { + switch (scheduleStatus) { + case ONLINE: + logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers); + setSchedule(project.getId(), scheduleObj); + break; + case OFFLINE: + logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers); + deleteSchedule(project.getId(), id); + break; + default: + putMsg(result, 
Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString()); + return result; + } + } catch (Exception e) { + result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? "set online failure" : "set offline failure"); + throw new ServiceException(result.get(Constants.MSG).toString()); + } + + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * query schedule + * + * @param loginUser login user + * @param projectName project name + * @param processDefineId process definition id + * @param pageNo page number + * @param pageSize page size + * @param searchVal search value + * @return schedule list page + */ + @Override + public Map querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize) { + + HashMap result = new HashMap<>(); + + Project project = projectMapper.queryByName(projectName); + + // check project auth + boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); + if (!hasProjectAndPerm) { + return result; + } + + ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); + return result; + } + Page page = new Page<>(pageNo, pageSize); + IPage scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging( + page, processDefineId, searchVal + ); + + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + pageInfo.setTotalCount((int) scheduleIPage.getTotal()); + pageInfo.setLists(scheduleIPage.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * query schedule list + * + * @param loginUser login user + * @param projectName project name + * @return schedule list + */ + @Override + public Map queryScheduleList(User loginUser, String projectName) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); 
+ + // check project auth + boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); + if (!hasProjectAndPerm) { + return result; + } + + List schedules = scheduleMapper.querySchedulerListByProjectName(projectName); + + result.put(Constants.DATA_LIST, schedules); + putMsg(result, Status.SUCCESS); + + return result; + } + + public void setSchedule(int projectId, Schedule schedule) { + int scheduleId = schedule.getId(); + logger.info("set schedule, project id: {}, scheduleId: {}", projectId, scheduleId); + + Date startDate = schedule.getStartTime(); + Date endDate = schedule.getEndTime(); + + String jobName = QuartzExecutors.buildJobName(scheduleId); + String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); + + Map dataMap = QuartzExecutors.buildDataMap(projectId, scheduleId, schedule); + + QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, jobName, jobGroupName, startDate, endDate, + schedule.getCrontab(), dataMap); + + } + + /** + * delete schedule + * + * @param projectId project id + * @param scheduleId schedule id + * @throws RuntimeException runtime exception + */ + @Override + public void deleteSchedule(int projectId, int scheduleId) { + logger.info("delete schedules of project id:{}, schedule id:{}", projectId, scheduleId); + + String jobName = QuartzExecutors.buildJobName(scheduleId); + String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); + + if (!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)) { + logger.warn("set offline failure:projectId:{},scheduleId:{}", projectId, scheduleId); + throw new ServiceException("set offline failure"); + } + + } + + /** + * check valid + * + * @param result result + * @param bool bool + * @param status status + * @return check result code + */ + private boolean checkValid(Map result, boolean bool, Status status) { + // timeout is valid + if (bool) { + putMsg(result, status); + return true; + } + return false; + } + + /** + * delete 
schedule by id + * + * @param loginUser login user + * @param projectName project name + * @param scheduleId scheule id + * @return delete result code + */ + @Override + public Map deleteScheduleById(User loginUser, String projectName, Integer scheduleId) { + + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + Schedule schedule = scheduleMapper.selectById(scheduleId); + + if (schedule == null) { + putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId); + return result; + } + + // Determine if the login user is the owner of the schedule + if (loginUser.getId() != schedule.getUserId() + && loginUser.getUserType() != UserType.ADMIN_USER) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + // check schedule is already online + if (schedule.getReleaseState() == ReleaseState.ONLINE) { + putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId()); + return result; + } + + int delete = scheduleMapper.deleteById(scheduleId); + + if (delete > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR); + } + return result; + } + + /** + * preview schedule + * + * @param loginUser login user + * @param projectName project name + * @param schedule schedule expression + * @return the next five fire time + */ + @Override + public Map previewSchedule(User loginUser, String projectName, String schedule) { + Map result = new HashMap<>(); + CronExpression cronExpression; + ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); + Date now = new Date(); + + Date startTime = now.after(scheduleParam.getStartTime()) ? 
now : scheduleParam.getStartTime(); + Date endTime = scheduleParam.getEndTime(); + try { + cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab()); + } catch (ParseException e) { + logger.error(e.getMessage(), e); + putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR); + return result; + } + List selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime, cronExpression, Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT); + result.put(Constants.DATA_LIST, selfFireDateList.stream().map(DateUtils::dateToString)); + putMsg(result, Status.SUCCESS); + return result; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java index 917ebdf3a8..9dc6a6d10b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java @@ -17,22 +17,22 @@ package org.apache.dolphinscheduler.api.service.impl; -import java.util.Date; -import java.util.List; -import java.util.UUID; - -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; - -import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.api.controller.BaseController; -import org.apache.dolphinscheduler.api.service.BaseService; import org.apache.dolphinscheduler.api.service.SessionService; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.Session; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.SessionMapper; + +import java.util.Date; +import java.util.List; +import java.util.UUID; + +import 
javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -44,7 +44,7 @@ import org.springframework.web.util.WebUtils; * session service implement */ @Service -public class SessionServiceImpl extends BaseService implements SessionService { +public class SessionServiceImpl extends BaseServiceImpl implements SessionService { private static final Logger logger = LoggerFactory.getLogger(SessionService.class); @@ -57,6 +57,7 @@ public class SessionServiceImpl extends BaseService implements SessionService { * @param request request * @return session */ + @Override public Session getSession(HttpServletRequest request) { String sessionId = request.getHeader(Constants.SESSION_ID); @@ -85,6 +86,7 @@ public class SessionServiceImpl extends BaseService implements SessionService { * @param ip ip * @return session string */ + @Override @Transactional(rollbackFor = RuntimeException.class) public String createSession(User user, String ip) { Session session = null; @@ -142,6 +144,7 @@ public class SessionServiceImpl extends BaseService implements SessionService { * @param ip no use * @param loginUser login user */ + @Override public void signOut(String ip, User loginUser) { try { /** diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java new file mode 100644 index 0000000000..926ffa0c2c --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java @@ -0,0 +1,209 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ProcessInstanceService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.api.service.TaskInstanceService; +import org.apache.dolphinscheduler.api.service.UsersService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.text.MessageFormat; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; 
+ +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * task instance service impl + */ +@Service +public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInstanceService { + + @Autowired + ProjectMapper projectMapper; + + @Autowired + ProjectService projectService; + + @Autowired + ProcessService processService; + + @Autowired + TaskInstanceMapper taskInstanceMapper; + + @Autowired + ProcessInstanceService processInstanceService; + + @Autowired + UsersService usersService; + + /** + * query task list by project, process instance, task name, task start time, task end time, task status, keyword paging + * + * @param loginUser login user + * @param projectName project name + * @param processInstanceId process instance id + * @param searchVal search value + * @param taskName task name + * @param stateType state type + * @param host host + * @param startDate start time + * @param endDate end time + * @param pageNo page number + * @param pageSize page size + * @return task list page + */ + @Override + public Map queryTaskListPaging(User loginUser, String projectName, + Integer processInstanceId, String processInstanceName, String taskName, String executorName, String startDate, + String endDate, String searchVal, ExecutionStatus stateType, String host, + Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status status = (Status) checkResult.get(Constants.STATUS); + if (status != Status.SUCCESS) { + return checkResult; + } + + int[] statusArray = null; + if (stateType != null) { + statusArray = new int[]{stateType.ordinal()}; + } + + Date start = null; + Date end = null; + if (StringUtils.isNotEmpty(startDate)) { + start = DateUtils.getScheduleDate(startDate); + if (start == null) { + return 
generateInvalidParamRes(result, "startDate"); + } + } + if (StringUtils.isNotEmpty(endDate)) { + end = DateUtils.getScheduleDate(endDate); + if (end == null) { + return generateInvalidParamRes(result, "endDate"); + } + } + + Page page = new Page<>(pageNo, pageSize); + PageInfo> pageInfo = new PageInfo<>(pageNo, pageSize); + int executorId = usersService.getUserIdByName(executorName); + + IPage taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging( + page, project.getId(), processInstanceId, processInstanceName, searchVal, taskName, executorId, statusArray, host, start, end + ); + Set exclusionSet = new HashSet<>(); + exclusionSet.add(Constants.CLASS); + exclusionSet.add("taskJson"); + List taskInstanceList = taskInstanceIPage.getRecords(); + + for (TaskInstance taskInstance : taskInstanceList) { + taskInstance.setDuration(DateUtils.format2Duration(taskInstance.getStartTime(), taskInstance.getEndTime())); + User executor = usersService.queryUser(taskInstance.getExecutorId()); + if (null != executor) { + taskInstance.setExecutorName(executor.getUserName()); + } + } + pageInfo.setTotalCount((int) taskInstanceIPage.getTotal()); + pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(), exclusionSet)); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * change one task instance's state from failure to forced success + * + * @param loginUser login user + * @param projectName project name + * @param taskInstanceId task instance id + * @return the result code and msg + */ + @Override + public Map forceTaskSuccess(User loginUser, String projectName, Integer taskInstanceId) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + // check user auth + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status status = (Status) checkResult.get(Constants.STATUS); + if (status != Status.SUCCESS) { + 
return checkResult; + } + + // check whether the task instance can be found + TaskInstance task = taskInstanceMapper.selectById(taskInstanceId); + if (task == null) { + putMsg(result, Status.TASK_INSTANCE_NOT_FOUND); + return result; + } + + // check whether the task instance state type is failure + if (!task.getState().typeIsFailure()) { + putMsg(result, Status.TASK_INSTANCE_STATE_OPERATION_ERROR, taskInstanceId, task.getState().toString()); + return result; + } + + // change the state of the task instance + task.setState(ExecutionStatus.FORCED_SUCCESS); + int changedNum = taskInstanceMapper.updateById(task); + if (changedNum > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.FORCE_TASK_SUCCESS_ERROR); + } + + return result; + } + + /*** + * generate {@link org.apache.dolphinscheduler.api.enums.Status#REQUEST_PARAMS_NOT_VALID_ERROR} res with param name + * @param result exist result map + * @param params invalid params name + * @return update result map + */ + private Map generateInvalidParamRes(Map result, String params) { + result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); + result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), params)); + return result; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskRecordServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskRecordServiceImpl.java new file mode 100644 index 0000000000..c755da057b --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskRecordServiceImpl.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import static org.apache.dolphinscheduler.common.Constants.TASK_RECORD_TABLE_HISTORY_HIVE_LOG; +import static org.apache.dolphinscheduler.common.Constants.TASK_RECORD_TABLE_HIVE_LOG; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.TaskRecordService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.dao.TaskRecordDao; +import org.apache.dolphinscheduler.dao.entity.TaskRecord; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.stereotype.Service; + +/** + * task record service impl + */ +@Service +public class TaskRecordServiceImpl extends BaseServiceImpl implements TaskRecordService { + + /** + * query task record list paging + * + * @param taskName task name + * @param state state + * @param sourceTable source table + * @param destTable destination table + * @param taskDate task date + * @param startDate start time + * @param endDate end time + * @param pageNo page number + * @param pageSize page size + * @param isHistory is history + * @return task record list + */ + @Override + public Map queryTaskRecordListPaging(boolean isHistory, String taskName, String startDate, + String taskDate, String sourceTable, + String 
destTable, String endDate, + String state, Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(); + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + + Map map = new HashMap<>(); + map.put("taskName", taskName); + map.put("taskDate", taskDate); + map.put("state", state); + map.put("sourceTable", sourceTable); + map.put("targetTable", destTable); + map.put("startTime", startDate); + map.put("endTime", endDate); + map.put("offset", pageInfo.getStart().toString()); + map.put("pageSize", pageInfo.getPageSize().toString()); + + String table = isHistory ? TASK_RECORD_TABLE_HISTORY_HIVE_LOG : TASK_RECORD_TABLE_HIVE_LOG; + int count = TaskRecordDao.countTaskRecord(map, table); + List recordList = TaskRecordDao.queryAllTaskRecord(map, table); + pageInfo.setTotalCount(count); + pageInfo.setLists(recordList); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java index 8aafe9102e..9deef7d035 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java @@ -18,7 +18,6 @@ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.service.BaseService; import org.apache.dolphinscheduler.api.service.TenantService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.RegexUtils; @@ -42,8 +41,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import 
org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -52,12 +49,10 @@ import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** - * tenant service + * tenant service impl */ @Service -public class TenantServiceImpl extends BaseService implements TenantService { - - private static final Logger logger = LoggerFactory.getLogger(TenantServiceImpl.class); +public class TenantServiceImpl extends BaseServiceImpl implements TenantService { @Autowired private TenantMapper tenantMapper; @@ -81,13 +76,14 @@ public class TenantServiceImpl extends BaseService implements TenantService { * @return create result code * @throws Exception exception */ + @Override @Transactional(rollbackFor = Exception.class) public Map createTenant(User loginUser, String tenantCode, int queueId, String desc) throws Exception { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); result.put(Constants.STATUS, false); if (isNotAdmin(loginUser, result)) { return result; @@ -138,9 +134,10 @@ public class TenantServiceImpl extends BaseService implements TenantService { * @param pageSize page size * @return tenant list page */ + @Override public Map queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { return result; } @@ -168,10 +165,11 @@ public class TenantServiceImpl extends BaseService implements TenantService { * @return update result code * @throws Exception exception */ + @Override public Map updateTenant(User loginUser, int id, String tenantCode, int queueId, String desc) throws Exception { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); result.put(Constants.STATUS, false); if (isNotAdmin(loginUser, result)) { @@ -231,9 +229,10 @@ public class TenantServiceImpl extends BaseService implements TenantService { * @return 
delete result code * @throws Exception exception */ + @Override @Transactional(rollbackFor = Exception.class) public Map deleteTenantById(User loginUser, int id) throws Exception { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { return result; @@ -291,7 +290,7 @@ public class TenantServiceImpl extends BaseService implements TenantService { */ public Map queryTenantList(String tenantCode) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List resourceList = tenantMapper.queryByTenantCode(tenantCode); if (CollectionUtils.isNotEmpty(resourceList)) { @@ -309,9 +308,10 @@ public class TenantServiceImpl extends BaseService implements TenantService { * @param loginUser login user * @return tenant list */ + @Override public Map queryTenantList(User loginUser) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List resourceList = tenantMapper.selectList(null); result.put(Constants.DATA_LIST, resourceList); @@ -326,6 +326,7 @@ public class TenantServiceImpl extends BaseService implements TenantService { * @param tenantCode tenant code * @return true if tenant code can user, otherwise return false */ + @Override public Result verifyTenantCode(String tenantCode) { Result result = new Result(); if (checkTenantExists(tenantCode)) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java new file mode 100644 index 0000000000..27a3b60e92 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java @@ -0,0 +1,332 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.UdfFuncService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.UdfType; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.apache.dolphinscheduler.dao.entity.UdfFunc; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; +import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; +import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; + +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * udf func service impl + */ +@Service +public class 
UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncService { + + private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceImpl.class); + + @Autowired + private ResourceMapper resourceMapper; + + @Autowired + private UdfFuncMapper udfFuncMapper; + + @Autowired + private UDFUserMapper udfUserMapper; + + /** + * create udf function + * + * @param loginUser login user + * @param type udf type + * @param funcName function name + * @param argTypes argument types + * @param database database + * @param desc description + * @param resourceId resource id + * @param className class name + * @return create result code + */ + @Override + public Result createUdfFunction(User loginUser, + String funcName, + String className, + String argTypes, + String database, + String desc, + UdfType type, + int resourceId) { + Result result = new Result<>(); + + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()) { + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + + // verify udf func name exist + if (checkUdfFuncNameExists(funcName)) { + putMsg(result, Status.UDF_FUNCTION_EXISTS); + return result; + } + + Resource resource = resourceMapper.selectById(resourceId); + if (resource == null) { + logger.error("resourceId {} is not exist", resourceId); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + + //save data + UdfFunc udf = new UdfFunc(); + Date now = new Date(); + udf.setUserId(loginUser.getId()); + udf.setFuncName(funcName); + udf.setClassName(className); + if (StringUtils.isNotEmpty(argTypes)) { + udf.setArgTypes(argTypes); + } + if (StringUtils.isNotEmpty(database)) { + udf.setDatabase(database); + } + udf.setDescription(desc); + udf.setResourceId(resourceId); + udf.setResourceName(resource.getFullName()); + udf.setType(type); + + udf.setCreateTime(now); + udf.setUpdateTime(now); + + 
udfFuncMapper.insert(udf); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * + * @param name name + * @return check result code + */ + private boolean checkUdfFuncNameExists(String name) { + List resource = udfFuncMapper.queryUdfByIdStr(null, name); + return resource != null && !resource.isEmpty(); + } + + /** + * query udf function + * + * @param id udf function id + * @return udf function detail + */ + @Override + public Map queryUdfFuncDetail(int id) { + Map result = new HashMap<>(); + UdfFunc udfFunc = udfFuncMapper.selectById(id); + if (udfFunc == null) { + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + result.put(Constants.DATA_LIST, udfFunc); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * updateProcessInstance udf function + * + * @param udfFuncId udf function id + * @param type resource type + * @param funcName function name + * @param argTypes argument types + * @param database data base + * @param desc description + * @param resourceId resource id + * @param className class name + * @return update result code + */ + @Override + public Map updateUdfFunc(int udfFuncId, + String funcName, + String className, + String argTypes, + String database, + String desc, + UdfType type, + int resourceId) { + Map result = new HashMap<>(); + // verify udfFunc is exist + UdfFunc udf = udfFuncMapper.selectUdfById(udfFuncId); + + if (udf == null) { + result.put(Constants.STATUS, Status.UDF_FUNCTION_NOT_EXIST); + result.put(Constants.MSG, Status.UDF_FUNCTION_NOT_EXIST.getMsg()); + return result; + } + + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()) { + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + + // verify udfFuncName is exist + if (!funcName.equals(udf.getFuncName())) { + if (checkUdfFuncNameExists(funcName)) { + logger.error("UdfFunc {} has exist, can't create again.", 
funcName); + result.put(Constants.STATUS, Status.UDF_FUNCTION_EXISTS); + result.put(Constants.MSG, Status.UDF_FUNCTION_EXISTS.getMsg()); + return result; + } + } + + Resource resource = resourceMapper.selectById(resourceId); + if (resource == null) { + logger.error("resourceId {} is not exist", resourceId); + result.put(Constants.STATUS, Status.RESOURCE_NOT_EXIST); + result.put(Constants.MSG, Status.RESOURCE_NOT_EXIST.getMsg()); + return result; + } + Date now = new Date(); + udf.setFuncName(funcName); + udf.setClassName(className); + udf.setArgTypes(argTypes); + if (StringUtils.isNotEmpty(database)) { + udf.setDatabase(database); + } + udf.setDescription(desc); + udf.setResourceId(resourceId); + udf.setResourceName(resource.getFullName()); + udf.setType(type); + + udf.setUpdateTime(now); + + udfFuncMapper.updateById(udf); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * query udf function list paging + * + * @param loginUser login user + * @param pageNo page number + * @param pageSize page size + * @param searchVal search value + * @return udf function list page + */ + @Override + public Map queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(); + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + IPage udfFuncList = getUdfFuncsPage(loginUser, searchVal, pageSize, pageNo); + pageInfo.setTotalCount((int)udfFuncList.getTotal()); + pageInfo.setLists(udfFuncList.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * get udf functions + * + * @param loginUser login user + * @param searchVal search value + * @param pageSize page size + * @param pageNo page number + * @return udf function list page + */ + private IPage getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) { + int userId = loginUser.getId(); + if (isAdmin(loginUser)) { + userId = 0; + } + Page page = new 
Page<>(pageNo, pageSize); + return udfFuncMapper.queryUdfFuncPaging(page, userId, searchVal); + } + + /** + * query udf list + * + * @param loginUser login user + * @param type udf type + * @return udf func list + */ + @Override + public Map queryUdfFuncList(User loginUser, Integer type) { + Map result = new HashMap<>(); + int userId = loginUser.getId(); + if (isAdmin(loginUser)) { + userId = 0; + } + List udfFuncList = udfFuncMapper.getUdfFuncByType(userId, type); + + result.put(Constants.DATA_LIST, udfFuncList); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * delete udf function + * + * @param id udf function id + * @return delete result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Result delete(int id) { + Result result = new Result<>(); + udfFuncMapper.deleteById(id); + udfUserMapper.deleteByUdfFuncId(id); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * verify udf function by name + * + * @param name name + * @return true if the name can user, otherwise return false + */ + @Override + public Result verifyUdfFuncByName(String name) { + Result result = new Result<>(); + if (checkUdfFuncNameExists(name)) { + putMsg(result, Status.UDF_FUNCTION_EXISTS); + } else { + putMsg(result, Status.SUCCESS); + } + return result; + } + +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java index 0c3cb5dfdb..0f1483970a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java @@ -18,7 +18,6 @@ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.enums.Status; -import 
org.apache.dolphinscheduler.api.service.BaseService; import org.apache.dolphinscheduler.api.service.UiPluginService; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.PluginType; @@ -34,20 +33,14 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; /** - * UiPluginServiceImpl + * ui plugin service impl */ @Service -public class UiPluginServiceImpl extends BaseService implements UiPluginService { +public class UiPluginServiceImpl extends BaseServiceImpl implements UiPluginService { @Autowired PluginDefineMapper pluginDefineMapper; - private static final String LANGUAGE_REGEX = "\"([^\"]*)\""; - - private static final String LANGUAGE_SYMBOL = "$t"; - - private static final String ESCAPE_SYMBOL = "\\"; - @Override public Map queryUiPluginsByType(PluginType pluginType) { Map result = new HashMap<>(); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java new file mode 100644 index 0000000000..865f4c329a --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java @@ -0,0 +1,1094 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; +import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.api.service.UsersService; +import org.apache.dolphinscheduler.api.utils.CheckUtils; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.EncryptionUtils; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.AlertGroup; +import org.apache.dolphinscheduler.dao.entity.DatasourceUser; +import org.apache.dolphinscheduler.dao.entity.ProjectUser; +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.apache.dolphinscheduler.dao.entity.ResourcesUser; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.UDFUser; +import 
org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; +import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; +import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; +import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; +import org.apache.dolphinscheduler.dao.mapper.TenantMapper; +import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; +import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; + +import java.io.IOException; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * users service impl + */ +@Service +public class UsersServiceImpl extends BaseServiceImpl implements UsersService { + + private static final Logger logger = LoggerFactory.getLogger(UsersServiceImpl.class); + + @Autowired + private UserMapper userMapper; + + @Autowired + private TenantMapper tenantMapper; + + @Autowired + private ProjectUserMapper projectUserMapper; + + @Autowired + private ResourceUserMapper resourceUserMapper; + + @Autowired + private ResourceMapper resourceMapper; + + @Autowired + private DataSourceUserMapper datasourceUserMapper; + + @Autowired + private UDFUserMapper udfUserMapper; + + @Autowired + private 
AlertGroupMapper alertGroupMapper; + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + + /** + * create user, only system admin have permission + * + * @param loginUser login user + * @param userName user name + * @param userPassword user password + * @param email email + * @param tenantId tenant id + * @param phone phone + * @param queue queue + * @return create result code + * @throws Exception exception + */ + @Override + @Transactional(rollbackFor = Exception.class) + public Map createUser(User loginUser, + String userName, + String userPassword, + String email, + int tenantId, + String phone, + String queue, + int state) throws IOException { + Map result = new HashMap<>(); + + //check all user params + String msg = this.checkUserParams(userName, userPassword, email, phone); + + if (!StringUtils.isEmpty(msg)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, msg); + return result; + } + if (!isAdmin(loginUser)) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + if (!checkTenantExists(tenantId)) { + putMsg(result, Status.TENANT_NOT_EXIST); + return result; + } + + User user = createUser(userName, userPassword, email, tenantId, phone, queue, state); + + Tenant tenant = tenantMapper.queryById(tenantId); + // resource upload startup + if (PropertyUtils.getResUploadStartupState()) { + // if tenant not exists + if (!HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(tenant.getTenantCode()))) { + createTenantDirIfNotExists(tenant.getTenantCode()); + } + String userPath = HadoopUtils.getHdfsUserDir(tenant.getTenantCode(), user.getId()); + HadoopUtils.getInstance().mkdir(userPath); + } + + putMsg(result, Status.SUCCESS); + return result; + + } + + @Override + @Transactional(rollbackFor = RuntimeException.class) + public User createUser(String userName, + String userPassword, + String email, + int tenantId, + String phone, + String queue, + int state) { + User user = new User(); + Date now = new 
Date(); + + user.setUserName(userName); + user.setUserPassword(EncryptionUtils.getMd5(userPassword)); + user.setEmail(email); + user.setTenantId(tenantId); + user.setPhone(phone); + user.setState(state); + // create general users, administrator users are currently built-in + user.setUserType(UserType.GENERAL_USER); + user.setCreateTime(now); + user.setUpdateTime(now); + if (StringUtils.isEmpty(queue)) { + queue = ""; + } + user.setQueue(queue); + + // save user + userMapper.insert(user); + return user; + } + + /*** + * create User for ldap login + */ + @Override + @Transactional(rollbackFor = Exception.class) + public User createUser(UserType userType, String userId, String email) { + User user = new User(); + Date now = new Date(); + + user.setUserName(userId); + user.setEmail(email); + // create general users, administrator users are currently built-in + user.setUserType(userType); + user.setCreateTime(now); + user.setUpdateTime(now); + user.setQueue(""); + + // save user + userMapper.insert(user); + return user; + } + + /** + * get user by user name + * + * @param userName user name + * @return exist user or null + */ + @Override + public User getUserByUserName(String userName) { + return userMapper.queryByUserNameAccurately(userName); + } + + /** + * query user by id + * + * @param id id + * @return user info + */ + @Override + public User queryUser(int id) { + return userMapper.selectById(id); + } + + /** + * query user + * + * @param name name + * @return user info + */ + @Override + public User queryUser(String name) { + return userMapper.queryByUserNameAccurately(name); + } + + /** + * query user + * + * @param name name + * @param password password + * @return user info + */ + @Override + public User queryUser(String name, String password) { + String md5 = EncryptionUtils.getMd5(password); + return userMapper.queryUserByNamePassword(name, md5); + } + + /** + * get user id by user name + * + * @param name user name + * @return if name empty 0, user not 
exists -1, user exist user id + */ + @Override + public int getUserIdByName(String name) { + //executor name query + int executorId = 0; + if (StringUtils.isNotEmpty(name)) { + User executor = queryUser(name); + if (null != executor) { + executorId = executor.getId(); + } else { + executorId = -1; + } + } + + return executorId; + } + + /** + * query user list + * + * @param loginUser login user + * @param pageNo page number + * @param searchVal search value + * @param pageSize page size + * @return user list page + */ + @Override + public Map queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(); + + if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { + return result; + } + + Page page = new Page<>(pageNo, pageSize); + + IPage scheduleList = userMapper.queryUserPaging(page, searchVal); + + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + pageInfo.setTotalCount((int) scheduleList.getTotal()); + pageInfo.setLists(scheduleList.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * update user + * + * + * @param loginUser + * @param userId user id + * @param userName user name + * @param userPassword user password + * @param email email + * @param tenantId tenant id + * @param phone phone + * @param queue queue + * @return update result code + * @throws Exception exception + */ + @Override + public Map updateUser(User loginUser, int userId, + String userName, + String userPassword, + String email, + int tenantId, + String phone, + String queue, + int state) throws IOException { + Map result = new HashMap<>(); + result.put(Constants.STATUS, false); + + if (check(result, !hasPerm(loginUser, userId), Status.USER_NO_OPERATION_PERM)) { + return result; + } + User user = userMapper.selectById(userId); + if (user == null) { + putMsg(result, Status.USER_NOT_EXIST, userId); + return result; + } + if 
(StringUtils.isNotEmpty(userName)) { + + if (!CheckUtils.checkUserName(userName)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); + return result; + } + + User tempUser = userMapper.queryByUserNameAccurately(userName); + if (tempUser != null && tempUser.getId() != userId) { + putMsg(result, Status.USER_NAME_EXIST); + return result; + } + user.setUserName(userName); + } + + if (StringUtils.isNotEmpty(userPassword)) { + if (!CheckUtils.checkPassword(userPassword)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userPassword); + return result; + } + user.setUserPassword(EncryptionUtils.getMd5(userPassword)); + } + + if (StringUtils.isNotEmpty(email)) { + if (!CheckUtils.checkEmail(email)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, email); + return result; + } + user.setEmail(email); + } + + if (StringUtils.isNotEmpty(phone) && !CheckUtils.checkPhone(phone)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, phone); + return result; + } + user.setPhone(phone); + user.setQueue(queue); + user.setState(state); + Date now = new Date(); + user.setUpdateTime(now); + + //if user switches the tenant, the user's resources need to be copied to the new tenant + if (user.getTenantId() != tenantId) { + Tenant oldTenant = tenantMapper.queryById(user.getTenantId()); + //query tenant + Tenant newTenant = tenantMapper.queryById(tenantId); + if (newTenant != null) { + // if hdfs startup + if (PropertyUtils.getResUploadStartupState() && oldTenant != null) { + String newTenantCode = newTenant.getTenantCode(); + String oldResourcePath = HadoopUtils.getHdfsResDir(oldTenant.getTenantCode()); + String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode()); + + // if old tenant dir exists + if (HadoopUtils.getInstance().exists(oldResourcePath)) { + String newResourcePath = HadoopUtils.getHdfsResDir(newTenantCode); + String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode); + + //file resources list + List fileResourcesList = 
resourceMapper.queryResourceList( + null, userId, ResourceType.FILE.ordinal()); + if (CollectionUtils.isNotEmpty(fileResourcesList)) { + ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(fileResourcesList); + ResourceComponent resourceComponent = resourceTreeVisitor.visit(); + copyResourceFiles(resourceComponent, oldResourcePath, newResourcePath); + } + + //udf resources + List udfResourceList = resourceMapper.queryResourceList( + null, userId, ResourceType.UDF.ordinal()); + if (CollectionUtils.isNotEmpty(udfResourceList)) { + ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(udfResourceList); + ResourceComponent resourceComponent = resourceTreeVisitor.visit(); + copyResourceFiles(resourceComponent, oldUdfsPath, newUdfsPath); + } + + //Delete the user from the old tenant directory + String oldUserPath = HadoopUtils.getHdfsUserDir(oldTenant.getTenantCode(), userId); + HadoopUtils.getInstance().delete(oldUserPath, true); + } else { + // if old tenant dir not exists , create + createTenantDirIfNotExists(oldTenant.getTenantCode()); + } + + if (HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(newTenant.getTenantCode()))) { + //create user in the new tenant directory + String newUserPath = HadoopUtils.getHdfsUserDir(newTenant.getTenantCode(), user.getId()); + HadoopUtils.getInstance().mkdir(newUserPath); + } else { + // if new tenant dir not exists , create + createTenantDirIfNotExists(newTenant.getTenantCode()); + } + + } + } + user.setTenantId(tenantId); + } + + // updateProcessInstance user + userMapper.updateById(user); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * delete user + * + * @param loginUser login user + * @param id user id + * @return delete result code + * @throws Exception exception when operate hdfs + */ + @Override + public Map deleteUserById(User loginUser, int id) throws IOException { + Map result = new HashMap<>(); + //only admin can operate + if (!isAdmin(loginUser)) { + 
putMsg(result, Status.USER_NO_OPERATION_PERM, id); + return result; + } + //check exist + User tempUser = userMapper.selectById(id); + if (tempUser == null) { + putMsg(result, Status.USER_NOT_EXIST, id); + return result; + } + // delete user + User user = userMapper.queryTenantCodeByUserId(id); + + if (user != null) { + if (PropertyUtils.getResUploadStartupState()) { + String userPath = HadoopUtils.getHdfsUserDir(user.getTenantCode(), id); + if (HadoopUtils.getInstance().exists(userPath)) { + HadoopUtils.getInstance().delete(userPath, true); + } + } + } + + userMapper.deleteById(id); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * grant project + * + * @param loginUser login user + * @param userId user id + * @param projectIds project id array + * @return grant result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map grantProject(User loginUser, int userId, String projectIds) { + Map result = new HashMap<>(); + result.put(Constants.STATUS, false); + + //only admin can operate + if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { + return result; + } + + //check exist + User tempUser = userMapper.selectById(userId); + if (tempUser == null) { + putMsg(result, Status.USER_NOT_EXIST, userId); + return result; + } + //if the selected projectIds are empty, delete all items associated with the user + projectUserMapper.deleteProjectRelation(0, userId); + + if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS)) { + return result; + } + + String[] projectIdArr = projectIds.split(","); + + for (String projectId : projectIdArr) { + Date now = new Date(); + ProjectUser projectUser = new ProjectUser(); + projectUser.setUserId(userId); + projectUser.setProjectId(Integer.parseInt(projectId)); + projectUser.setPerm(7); + projectUser.setCreateTime(now); + projectUser.setUpdateTime(now); + projectUserMapper.insert(projectUser); + } + + putMsg(result, Status.SUCCESS); + + return result; + } 
+ + /** + * grant resource + * + * @param loginUser login user + * @param userId user id + * @param resourceIds resource id array + * @return grant result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map grantResources(User loginUser, int userId, String resourceIds) { + Map result = new HashMap<>(); + //only admin can operate + if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { + return result; + } + User user = userMapper.selectById(userId); + if (user == null) { + putMsg(result, Status.USER_NOT_EXIST, userId); + return result; + } + + Set needAuthorizeResIds = new HashSet<>(); + if (StringUtils.isNotBlank(resourceIds)) { + String[] resourceFullIdArr = resourceIds.split(","); + // need authorize resource id set + for (String resourceFullId : resourceFullIdArr) { + String[] resourceIdArr = resourceFullId.split("-"); + for (int i = 0; i <= resourceIdArr.length - 1; i++) { + int resourceIdValue = Integer.parseInt(resourceIdArr[i]); + needAuthorizeResIds.add(resourceIdValue); + } + } + } + + //get the authorized resource id list by user id + List oldAuthorizedRes = resourceMapper.queryAuthorizedResourceList(userId); + //if resource type is UDF,need check whether it is bound by UDF function + Set oldAuthorizedResIds = oldAuthorizedRes.stream().map(Resource::getId).collect(Collectors.toSet()); + + //get the unauthorized resource id list + oldAuthorizedResIds.removeAll(needAuthorizeResIds); + + if (CollectionUtils.isNotEmpty(oldAuthorizedResIds)) { + + // get all resource id of process definitions those is released + List> list = processDefinitionMapper.listResourcesByUser(userId); + Map> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list); + Set resourceIdSet = resourceProcessMap.keySet(); + + resourceIdSet.retainAll(oldAuthorizedResIds); + if (CollectionUtils.isNotEmpty(resourceIdSet)) { + logger.error("can't be deleted,because it is used of process definition"); + for 
(Integer resId : resourceIdSet) { + logger.error("resource id:{} is used of process definition {}", resId, resourceProcessMap.get(resId)); + } + putMsg(result, Status.RESOURCE_IS_USED); + return result; + } + + } + + resourceUserMapper.deleteResourceUser(userId, 0); + + if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) { + return result; + } + + for (int resourceIdValue : needAuthorizeResIds) { + Resource resource = resourceMapper.selectById(resourceIdValue); + if (resource == null) { + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + + Date now = new Date(); + ResourcesUser resourcesUser = new ResourcesUser(); + resourcesUser.setUserId(userId); + resourcesUser.setResourcesId(resourceIdValue); + if (resource.isDirectory()) { + resourcesUser.setPerm(Constants.AUTHORIZE_READABLE_PERM); + } else { + resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM); + } + + resourcesUser.setCreateTime(now); + resourcesUser.setUpdateTime(now); + resourceUserMapper.insert(resourcesUser); + + } + + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * grant udf function + * + * @param loginUser login user + * @param userId user id + * @param udfIds udf id array + * @return grant result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map grantUDFFunction(User loginUser, int userId, String udfIds) { + Map result = new HashMap<>(); + + //only admin can operate + if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { + return result; + } + User user = userMapper.selectById(userId); + if (user == null) { + putMsg(result, Status.USER_NOT_EXIST, userId); + return result; + } + + udfUserMapper.deleteByUserId(userId); + + if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) { + return result; + } + + String[] resourcesIdArr = udfIds.split(","); + + for (String udfId : resourcesIdArr) { + Date now = new Date(); + UDFUser udfUser = new UDFUser(); + udfUser.setUserId(userId); + 
udfUser.setUdfId(Integer.parseInt(udfId)); + udfUser.setPerm(7); + udfUser.setCreateTime(now); + udfUser.setUpdateTime(now); + udfUserMapper.insert(udfUser); + } + + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * grant datasource + * + * @param loginUser login user + * @param userId user id + * @param datasourceIds data source id array + * @return grant result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map grantDataSource(User loginUser, int userId, String datasourceIds) { + Map result = new HashMap<>(); + result.put(Constants.STATUS, false); + + //only admin can operate + if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { + return result; + } + User user = userMapper.selectById(userId); + if (user == null) { + putMsg(result, Status.USER_NOT_EXIST, userId); + return result; + } + + datasourceUserMapper.deleteByUserId(userId); + + if (check(result, StringUtils.isEmpty(datasourceIds), Status.SUCCESS)) { + return result; + } + + String[] datasourceIdArr = datasourceIds.split(","); + + for (String datasourceId : datasourceIdArr) { + Date now = new Date(); + + DatasourceUser datasourceUser = new DatasourceUser(); + datasourceUser.setUserId(userId); + datasourceUser.setDatasourceId(Integer.parseInt(datasourceId)); + datasourceUser.setPerm(7); + datasourceUser.setCreateTime(now); + datasourceUser.setUpdateTime(now); + datasourceUserMapper.insert(datasourceUser); + } + + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * query user info + * + * @param loginUser login user + * @return user info + */ + @Override + public Map getUserInfo(User loginUser) { + + Map result = new HashMap<>(); + + User user = null; + if (loginUser.getUserType() == UserType.ADMIN_USER) { + user = loginUser; + } else { + user = userMapper.queryDetailsById(loginUser.getId()); + + List alertGroups = alertGroupMapper.queryByUserId(loginUser.getId()); + + StringBuilder sb = new StringBuilder(); + + if 
(alertGroups != null && !alertGroups.isEmpty()) { + for (int i = 0; i < alertGroups.size() - 1; i++) { + sb.append(alertGroups.get(i).getGroupName() + ","); + } + sb.append(alertGroups.get(alertGroups.size() - 1)); + user.setAlertGroup(sb.toString()); + } + } + + result.put(Constants.DATA_LIST, user); + + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * query user list + * + * @param loginUser login user + * @return user list + */ + @Override + public Map queryAllGeneralUsers(User loginUser) { + Map result = new HashMap<>(); + //only admin can operate + if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { + return result; + } + + List userList = userMapper.queryAllGeneralUser(); + result.put(Constants.DATA_LIST, userList); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * query user list + * + * @param loginUser login user + * @return user list + */ + @Override + public Map queryUserList(User loginUser) { + Map result = new HashMap<>(); + //only admin can operate + if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { + return result; + } + + List userList = userMapper.selectList(null); + result.put(Constants.DATA_LIST, userList); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * verify user name exists + * + * @param userName user name + * @return true if user name does not exist, otherwise return false + */ + @Override + public Result verifyUserName(String userName) { + + Result result = new Result<>(); + User user = userMapper.queryByUserNameAccurately(userName); + if (user != null) { + putMsg(result, Status.USER_NAME_EXIST); + } else { + putMsg(result, Status.SUCCESS); + } + + return result; + } + + /** + * unauthorized user + * + * @param loginUser login user + * @param alertgroupId alert group id + * @return unauthorized result code + */ + @Override + public Map unauthorizedUser(User loginUser, Integer alertgroupId) { + + Map result = new HashMap<>(); + //only admin can 
operate + if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { + return result; + } + + List userList = userMapper.selectList(null); + List resultUsers = new ArrayList<>(); + Set userSet = null; + if (userList != null && !userList.isEmpty()) { + userSet = new HashSet<>(userList); + + List authedUserList = userMapper.queryUserListByAlertGroupId(alertgroupId); + + Set authedUserSet = null; + if (authedUserList != null && !authedUserList.isEmpty()) { + authedUserSet = new HashSet<>(authedUserList); + userSet.removeAll(authedUserSet); + } + resultUsers = new ArrayList<>(userSet); + } + result.put(Constants.DATA_LIST, resultUsers); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * authorized user + * + * @param loginUser login user + * @param alertgroupId alert group id + * @return authorized result code + */ + @Override + public Map authorizedUser(User loginUser, Integer alertgroupId) { + Map result = new HashMap<>(); + //only admin can operate + if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { + return result; + } + List userList = userMapper.queryUserListByAlertGroupId(alertgroupId); + result.put(Constants.DATA_LIST, userList); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * @param tenantId tenant id + * @return true if tenant exists, otherwise return false + */ + private boolean checkTenantExists(int tenantId) { + return tenantMapper.queryById(tenantId) != null; + } + + /** + * @return if check failed return the field, otherwise return null + */ + private String checkUserParams(String userName, String password, String email, String phone) { + + String msg = null; + if (!CheckUtils.checkUserName(userName)) { + + msg = userName; + } else if (!CheckUtils.checkPassword(password)) { + + msg = password; + } else if (!CheckUtils.checkEmail(email)) { + + msg = email; + } else if (!CheckUtils.checkPhone(phone)) { + + msg = phone; + } + + return msg; + } + + /** + * copy resource files + * + * 
@param resourceComponent resource component + * @param srcBasePath src base path + * @param dstBasePath dst base path + * @throws IOException io exception + */ + private void copyResourceFiles(ResourceComponent resourceComponent, String srcBasePath, String dstBasePath) throws IOException { + List components = resourceComponent.getChildren(); + + if (CollectionUtils.isNotEmpty(components)) { + for (ResourceComponent component : components) { + // verify whether exist + if (!HadoopUtils.getInstance().exists(String.format("%s/%s", srcBasePath, component.getFullName()))) { + logger.error("resource file: {} not exist,copy error", component.getFullName()); + throw new ServiceException(Status.RESOURCE_NOT_EXIST); + } + + if (!component.isDirctory()) { + // copy it to dst + HadoopUtils.getInstance().copy(String.format("%s/%s", srcBasePath, component.getFullName()), String.format("%s/%s", dstBasePath, component.getFullName()), false, true); + continue; + } + + if (CollectionUtils.isEmpty(component.getChildren())) { + // if not exist,need create it + if (!HadoopUtils.getInstance().exists(String.format("%s/%s", dstBasePath, component.getFullName()))) { + HadoopUtils.getInstance().mkdir(String.format("%s/%s", dstBasePath, component.getFullName())); + } + } else { + copyResourceFiles(component, srcBasePath, dstBasePath); + } + } + } + } + + /** + * register user, default state is 0, default tenant_id is 1, no phone, no queue + * + * @param userName user name + * @param userPassword user password + * @param repeatPassword repeat password + * @param email email + * @return register result code + * @throws Exception exception + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map registerUser(String userName, String userPassword, String repeatPassword, String email) { + Map result = new HashMap<>(); + + //check user params + String msg = this.checkUserParams(userName, userPassword, email, ""); + + if (!StringUtils.isEmpty(msg)) { + putMsg(result, 
Status.REQUEST_PARAMS_NOT_VALID_ERROR, msg); + return result; + } + + if (!userPassword.equals(repeatPassword)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "two passwords are not same"); + return result; + } + User user = createUser(userName, userPassword, email, 1, "", "", Flag.NO.ordinal()); + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, user); + return result; + } + + /** + * activate user, only system admin have permission, change user state code 0 to 1 + * + * @param loginUser login user + * @param userName user name + * @return create result code + */ + @Override + public Map activateUser(User loginUser, String userName) { + Map result = new HashMap<>(); + result.put(Constants.STATUS, false); + + if (!isAdmin(loginUser)) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + if (!CheckUtils.checkUserName(userName)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); + return result; + } + + User user = userMapper.queryByUserNameAccurately(userName); + + if (user == null) { + putMsg(result, Status.USER_NOT_EXIST, userName); + return result; + } + + if (user.getState() != Flag.NO.ordinal()) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); + return result; + } + + user.setState(Flag.YES.ordinal()); + Date now = new Date(); + user.setUpdateTime(now); + userMapper.updateById(user); + User responseUser = userMapper.queryByUserNameAccurately(userName); + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, responseUser); + return result; + } + + /** + * activate user, only system admin have permission, change users state code 0 to 1 + * + * @param loginUser login user + * @param userNames user name + * @return create result code + */ + @Override + public Map batchActivateUser(User loginUser, List userNames) { + Map result = new HashMap<>(); + + if (!isAdmin(loginUser)) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + int totalSuccess = 
0; + List successUserNames = new ArrayList<>(); + Map successRes = new HashMap<>(); + int totalFailed = 0; + List> failedInfo = new ArrayList<>(); + Map failedRes = new HashMap<>(); + for (String userName : userNames) { + Map tmpResult = activateUser(loginUser, userName); + if (tmpResult.get(Constants.STATUS) != Status.SUCCESS) { + totalFailed++; + Map failedBody = new HashMap<>(); + failedBody.put("userName", userName); + Status status = (Status) tmpResult.get(Constants.STATUS); + String errorMessage = MessageFormat.format(status.getMsg(), userName); + failedBody.put("msg", errorMessage); + failedInfo.add(failedBody); + } else { + totalSuccess++; + successUserNames.add(userName); + } + } + successRes.put("sum", totalSuccess); + successRes.put("userName", successUserNames); + failedRes.put("sum", totalFailed); + failedRes.put("info", failedInfo); + Map res = new HashMap<>(); + res.put("success", successRes); + res.put("failed", failedRes); + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, res); + return result; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java new file mode 100644 index 0000000000..22c7622ab8 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.WorkFlowLineageService; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage; +import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation; +import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * work flow lineage service impl + */ +@Service +public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkFlowLineageService { + + @Autowired + private WorkFlowLineageMapper workFlowLineageMapper; + + @Override + public Map queryWorkFlowLineageByName(String workFlowName, int projectId) { + Map result = new HashMap<>(); + List workFlowLineageList = workFlowLineageMapper.queryByName(workFlowName, projectId); + result.put(Constants.DATA_LIST, workFlowLineageList); + putMsg(result, Status.SUCCESS); + return result; + } + + private void getWorkFlowRelationRecursion(Set ids, List workFlowRelations, Set sourceIds) { + for (int id : ids) { + sourceIds.addAll(ids); + List workFlowRelationsTmp = workFlowLineageMapper.querySourceTarget(id); + if (workFlowRelationsTmp != null && !workFlowRelationsTmp.isEmpty()) { + Set idsTmp = new 
HashSet<>(); + for (WorkFlowRelation workFlowRelation:workFlowRelationsTmp) { + if (!sourceIds.contains(workFlowRelation.getTargetWorkFlowId())) { + idsTmp.add(workFlowRelation.getTargetWorkFlowId()); + } + } + workFlowRelations.addAll(workFlowRelationsTmp); + getWorkFlowRelationRecursion(idsTmp, workFlowRelations,sourceIds); + } + } + } + + @Override + public Map queryWorkFlowLineageByIds(Set ids, int projectId) { + Map result = new HashMap<>(); + List workFlowLineageList = workFlowLineageMapper.queryByIds(ids, projectId); + Map workFlowLists = new HashMap<>(); + Set idsV = new HashSet<>(); + if (ids == null || ids.isEmpty()) { + for (WorkFlowLineage workFlowLineage:workFlowLineageList) { + idsV.add(workFlowLineage.getWorkFlowId()); + } + } else { + idsV = ids; + } + List workFlowRelations = new ArrayList<>(); + Set sourceIds = new HashSet<>(); + getWorkFlowRelationRecursion(idsV, workFlowRelations, sourceIds); + + Set idSet = new HashSet<>(); + //If the incoming parameter is not empty, you need to add downstream workflow detail attributes + if (ids != null && !ids.isEmpty()) { + for (WorkFlowRelation workFlowRelation : workFlowRelations) { + idSet.add(workFlowRelation.getTargetWorkFlowId()); + } + for (int id : ids) { + idSet.remove(id); + } + if (!idSet.isEmpty()) { + workFlowLineageList.addAll(workFlowLineageMapper.queryByIds(idSet, projectId)); + } + } + + workFlowLists.put(Constants.WORKFLOW_LIST, workFlowLineageList); + workFlowLists.put(Constants.WORKFLOW_RELATION_LIST, workFlowRelations); + result.put(Constants.DATA_LIST, workFlowLists); + putMsg(result, Status.SUCCESS); + return result; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java new file mode 100644 index 0000000000..2f899ed1b7 --- /dev/null +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java @@ -0,0 +1,187 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; +import static org.apache.dolphinscheduler.common.Constants.SLASH; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.WorkerGroupService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.entity.WorkerGroup; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import 
java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + + +/** + * worker group service impl + */ +@Service +public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGroupService { + + private static final String NO_NODE_EXCEPTION_REGEX = "KeeperException$NoNodeException"; + + @Autowired + protected ZookeeperCachedOperator zookeeperCachedOperator; + + @Autowired + ProcessInstanceMapper processInstanceMapper; + + /** + * query worker group paging + * + * @param loginUser login user + * @param pageNo page number + * @param searchVal search value + * @param pageSize page size + * @return worker group list page + */ + @Override + public Map queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal) { + // list from index + int fromIndex = (pageNo - 1) * pageSize; + // list to index + int toIndex = (pageNo - 1) * pageSize + pageSize; + + Map result = new HashMap<>(); + if (isNotAdmin(loginUser, result)) { + return result; + } + + List workerGroups = getWorkerGroups(true); + + List resultDataList = new ArrayList<>(); + + if (CollectionUtils.isNotEmpty(workerGroups)) { + List searchValDataList = new ArrayList<>(); + + if (StringUtils.isNotEmpty(searchVal)) { + for (WorkerGroup workerGroup : workerGroups) { + if (workerGroup.getName().contains(searchVal)) { + searchValDataList.add(workerGroup); + } + } + } else { + searchValDataList = workerGroups; + } + + if (searchValDataList.size() < pageSize) { + toIndex = (pageNo - 1) * pageSize + searchValDataList.size(); + } + resultDataList = searchValDataList.subList(fromIndex, toIndex); + } + + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + pageInfo.setTotalCount(resultDataList.size()); + pageInfo.setLists(resultDataList); + + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * query all worker group + * + 
* @return all worker group list + */ + @Override + public Map queryAllGroup() { + Map result = new HashMap<>(); + + List workerGroups = getWorkerGroups(false); + + Set availableWorkerGroupSet = workerGroups.stream() + .map(WorkerGroup::getName) + .collect(Collectors.toSet()); + result.put(Constants.DATA_LIST, availableWorkerGroupSet); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * get worker groups + * + * @param isPaging whether paging + * @return WorkerGroup list + */ + private List getWorkerGroups(boolean isPaging) { + + String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS; + List workerGroups = new ArrayList<>(); + List workerGroupList; + try { + workerGroupList = zookeeperCachedOperator.getChildrenKeys(workerPath); + } catch (Exception e) { + if (e.getMessage().contains(NO_NODE_EXCEPTION_REGEX)) { + if (isPaging) { + return workerGroups; + } + + //ignore noNodeException return Default + WorkerGroup wg = new WorkerGroup(); + wg.setName(DEFAULT_WORKER_GROUP); + workerGroups.add(wg); + return workerGroups; + + } else { + throw e; + } + } + + for (String workerGroup : workerGroupList) { + String workerGroupPath = workerPath + SLASH + workerGroup; + List childrenNodes = zookeeperCachedOperator.getChildrenKeys(workerGroupPath); + if (CollectionUtils.isEmpty(childrenNodes)) { + continue; + } + String timeStamp = childrenNodes.get(0); + for (int i = 0; i < childrenNodes.size(); i++) { + childrenNodes.set(i, Host.of(childrenNodes.get(i)).getAddressAndWeight()); + } + + WorkerGroup wg = new WorkerGroup(); + wg.setName(workerGroup); + if (isPaging) { + wg.setIpList(childrenNodes); + String registeredIpValue = zookeeperCachedOperator.get(workerGroupPath + SLASH + timeStamp); + wg.setCreateTime(DateUtils.stringToDate(registeredIpValue.split(",")[6])); + wg.setUpdateTime(DateUtils.stringToDate(registeredIpValue.split(",")[7])); + } + workerGroups.add(wg); + } + return workerGroups; + 
} + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegexUtils.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegexUtils.java index 9ff7fac463..482cb55306 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegexUtils.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegexUtils.java @@ -44,4 +44,13 @@ public class RegexUtils { Matcher isNum = pattern.matcher(str); return isNum.matches(); } + + public static String escapeNRT(String str) { + // Logging should not be vulnerable to injection attacks: Replace pattern-breaking characters + if (str != null && !str.isEmpty()) { + return str.replaceAll("[\n\r\t]", "_"); + } + return null; + } + } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java index 1ca9b4a098..9c75e8638e 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License.
*/ + package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.ApiApplicationServer; @@ -21,10 +22,12 @@ import org.apache.dolphinscheduler.api.service.SessionService; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; -import org.junit.*; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Ignore; import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit4.SpringRunner; @@ -32,12 +35,14 @@ import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.web.context.WebApplicationContext; - +/** + * abstract controller test + */ @Ignore @RunWith(SpringRunner.class) @SpringBootTest(classes = ApiApplicationServer.class) public class AbstractControllerTest { - private static Logger logger = LoggerFactory.getLogger(AbstractControllerTest.class); + public static final String SESSION_ID = "sessionId"; protected MockMvc mockMvc; @@ -49,6 +54,7 @@ public class AbstractControllerTest { private SessionService sessionService; protected User user; + protected String sessionId; @Before @@ -57,13 +63,11 @@ public class AbstractControllerTest { createSession(); } - @After public void after(){ sessionService.signOut("127.0.0.1", user); } - private void createSession(){ User loginUser = new User(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java index 57bab355a5..dcb4cb3924 100644 --- 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java @@ -14,11 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -28,16 +35,12 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - - - +/** + * access token controller test + */ public class AccessTokenControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(AccessTokenControllerTest.class); + private static Logger logger = LoggerFactory.getLogger(AccessTokenControllerTest.class); @Test public void testCreateToken() throws Exception { diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AlertGroupControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AlertGroupControllerTest.java index 3c1bb2358c..1a1beb6abd 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AlertGroupControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AlertGroupControllerTest.java @@ -17,10 +17,16 @@ package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.AlertType; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -30,12 +36,11 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - +/** + * alert group controller test + */ public class AlertGroupControllerTest extends AbstractControllerTest{ + private static final Logger logger = 
LoggerFactory.getLogger(AlertGroupController.class); @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java index 6848daa746..897a5a61e6 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java @@ -14,37 +14,32 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; -import org.junit.Before; -import org.junit.Ignore; import org.junit.Test; -import org.junit.runner.RunWith; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; import org.springframework.http.MediaType; -import org.springframework.test.context.junit4.SpringRunner; -import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.MvcResult; -import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import org.springframework.web.context.WebApplicationContext; - 
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - +/** + * data analysis controller test + */ public class DataAnalysisControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class); + private static Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class); @Test public void testCountTaskState() throws Exception { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java index 3d89670883..dbd0e70f87 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java @@ -14,11 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; @@ -29,12 +36,6 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - - /** * data source controller test */ diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java index f0bd948766..b3e093a067 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java @@ -42,6 +42,7 @@ import org.springframework.util.MultiValueMap; * executor controller test */ public class ExecutorControllerTest extends AbstractControllerTest { + private static Logger logger = 
LoggerFactory.getLogger(ExecutorControllerTest.class); @Ignore diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoggerControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoggerControllerTest.java index 9b118f328c..45624b11ed 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoggerControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoggerControllerTest.java @@ -14,11 +14,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; @@ -28,15 +34,10 @@ import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; - /** * logger controller test */ - @Ignore public class LoggerControllerTest extends AbstractControllerTest { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java index 23e0819268..d8b2b8fd23 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java @@ -14,11 +14,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -28,16 +34,12 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - /** * login controller test */ public class LoginControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(LoginControllerTest.class); + private static Logger logger = LoggerFactory.getLogger(LoginControllerTest.class); @Test public void testLogin() throws Exception { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java index 5dd749e5ef..87427f6730 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java @@ -14,11 +14,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -26,10 +32,6 @@ import org.slf4j.LoggerFactory; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - /** * monitor controller test */ @@ -37,7 +39,6 @@ public class MonitorControllerTest extends AbstractControllerTest { private static final Logger logger = LoggerFactory.getLogger(MonitorControllerTest.class); - @Test public void testListMaster() throws Exception { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java index bdd762afa8..708fb3fe0b 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java index 0b185c84d3..0f58476cc1 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java @@ -14,11 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; @@ -29,17 +36,12 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - /** - * project controller + * project controller test */ public class ProjectControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(ProjectControllerTest.class); + private static Logger logger = LoggerFactory.getLogger(ProjectControllerTest.class); @Test public void testCreateProject() throws Exception { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/QueueControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/QueueControllerTest.java index 3ed2ba7c35..cad97f5592 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/QueueControllerTest.java +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/QueueControllerTest.java @@ -14,11 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -28,11 +35,6 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - /** * queue controller test */ diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java index 1ca7421e9d..3a5d3c397f 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java @@ -14,13 
+14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UdfType; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -30,15 +37,11 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - /** * resources controller test */ public class ResourcesControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(ResourcesControllerTest.class); @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java index 8c3090b205..6122c1ac7f 100644 --- 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java @@ -14,14 +14,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.WarningType; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -31,15 +38,11 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - /** * scheduler controller test */ public class SchedulerControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); @Test diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java index 4a26bc2607..a78ac060ad 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java @@ -14,11 +14,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -28,11 +34,11 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - +/** + * task record controller test + */ public class TaskRecordControllerTest extends AbstractControllerTest { + private static final Logger logger = LoggerFactory.getLogger(TaskRecordControllerTest.class); @Test diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java index 12b0715a35..c9463a82d9 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java @@ -14,11 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -28,15 +35,11 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - /** * tenant controller test */ public class TenantControllerTest extends AbstractControllerTest{ + private static Logger logger = 
LoggerFactory.getLogger(TenantControllerTest.class); @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java index 6537288067..4f220be8cd 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java @@ -14,11 +14,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.utils.JSONUtils; + +import java.util.ArrayList; +import java.util.List; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -28,18 +38,11 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -import java.util.ArrayList; -import java.util.List; - /** * users controller test */ public class 
UsersControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(UsersControllerTest.class); @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageControllerTest.java index 7dea15d537..11fc2b5385 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageControllerTest.java @@ -14,11 +14,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -28,11 +34,11 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - +/** + * work flow lineage controller test + */ public class WorkFlowLineageControllerTest extends AbstractControllerTest { + private static Logger logger = 
LoggerFactory.getLogger(WorkFlowLineageControllerTest.class); @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java index f9a2ffc218..2ad07ff16c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java @@ -1,4 +1,3 @@ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -15,11 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.dolphinscheduler.api.controller; + +package org.apache.dolphinscheduler.api.controller; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -29,12 +35,11 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - +/** + * worker group controller test + */ public class WorkerGroupControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(WorkerGroupControllerTest.class); @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java index e10d7185f9..ff5257f0ed 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; import static org.mockito.ArgumentMatchers.any; @@ -48,6 +49,9 @@ import org.slf4j.LoggerFactory; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * access token service test + */ @RunWith(MockitoJUnitRunner.class) public class AccessTokenServiceTest { @@ -59,7 +63,6 @@ public class AccessTokenServiceTest { @Mock private AccessTokenMapper accessTokenMapper; - @Test @SuppressWarnings("unchecked") public void testQueryAccessTokenList() { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java index b7b6575ed1..85115bbc28 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java @@ -21,9 +21,9 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.AlertGroupServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.AlertType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.AlertGroup; @@ -31,7 +31,6 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; @@ -48,13 +47,17 @@ import org.slf4j.LoggerFactory; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * alert group service test + */ @RunWith(MockitoJUnitRunner.class) public class AlertGroupServiceTest { private static final Logger logger = LoggerFactory.getLogger(AlertGroupServiceTest.class); @InjectMocks - private AlertGroupService alertGroupService; + private AlertGroupServiceImpl alertGroupService; + @Mock private AlertGroupMapper alertGroupMapper; @@ -64,7 +67,7 @@ public class AlertGroupServiceTest { public void testQueryAlertGroup() { Mockito.when(alertGroupMapper.queryAllGroupList()).thenReturn(getList()); - HashMap result = alertGroupService.queryAlertgroup(); + Map result = alertGroupService.queryAlertgroup(); logger.info(result.toString()); List alertGroups = (List) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(alertGroups)); diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceServiceTest.java index fb58070304..ae3896218c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceServiceTest.java @@ -43,6 +43,9 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; +/** + * alert plugin instance service test + */ @RunWith(MockitoJUnitRunner.class) public class AlertPluginInstanceServiceTest { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTest.java index 968dd470f6..0d962c979d 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTest.java @@ -14,14 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.BaseServiceImpl; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.HashMap; +import java.util.Map; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -33,13 +39,10 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.mock.web.MockCookie; -import org.springframework.mock.web.MockHttpServletRequest; - -import javax.servlet.http.Cookie; -import java.util.HashMap; -import java.util.Map; +/** + * base service test + */ @RunWith(PowerMockRunner.class) @PowerMockIgnore({"sun.security.*", "javax.net.*"}) @PrepareForTest({HadoopUtils.class}) @@ -47,14 +50,14 @@ public class BaseServiceTest { private static final Logger logger = LoggerFactory.getLogger(BaseServiceTest.class); - private BaseService baseService; + private BaseServiceImpl baseService; @Mock private HadoopUtils hadoopUtils; @Before public void setUp() { - baseService = new BaseService(); + baseService = new BaseServiceImpl(); } @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java index e4f4a4e18c..5246b6ef09 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java @@ -14,6 +14,7 @@ * See the 
License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; import static org.assertj.core.api.Assertions.assertThat; @@ -58,6 +59,9 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.powermock.modules.junit4.PowerMockRunner; +/** + * data analysis service test + */ @RunWith(PowerMockRunner.class) public class DataAnalysisServiceTest { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java index 13eb1b9c2e..d30d037d79 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.DataSourceServiceImpl; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbConnectType; @@ -50,16 +51,20 @@ import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +/** + * data source service test + */ @RunWith(PowerMockRunner.class) @PowerMockIgnore({"sun.security.*", "javax.net.*"}) @PrepareForTest({DataSourceFactory.class, CommonUtils.class}) public class DataSourceServiceTest { - @InjectMocks - private DataSourceService dataSourceService; + private DataSourceServiceImpl dataSourceService; + @Mock private DataSourceMapper dataSourceMapper; + @Mock private DataSourceUserMapper datasourceUserMapper; diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java index 3f25fb84d4..b3a79ccd62 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java @@ -22,6 +22,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.ExecutorServiceImpl; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; @@ -59,13 +60,13 @@ import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; /** - * test for ExecutorService + * executor service 2 test */ @RunWith(MockitoJUnitRunner.Silent.class) public class ExecutorService2Test { @InjectMocks - private ExecutorService executorService; + private ExecutorServiceImpl executorService; @Mock private ProcessService processService; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java index 57cd207c4d..2976568f8a 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java @@ -14,11 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.ApiApplicationServer; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.ExecutorServiceImpl; import org.apache.dolphinscheduler.common.Constants; + +import java.text.MessageFormat; +import java.util.HashMap; +import java.util.Map; + import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; @@ -29,43 +36,37 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit4.SpringRunner; -import java.text.MessageFormat; -import java.util.HashMap; -import java.util.Map; - +/** + * executor service test + */ @RunWith(SpringRunner.class) @SpringBootTest(classes = ApiApplicationServer.class) public class ExecutorServiceTest { + private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceTest.class); @Autowired - private ExecutorService executorService; + private ExecutorServiceImpl executorService; @Ignore @Test public void startCheckByProcessDefinedId(){ - Map map = executorService.startCheckByProcessDefinedId(1234); Assert.assertNull(map); - } - @Test public void putMsgWithParamsTest() { - Map map = new HashMap<>(); putMsgWithParams(map, Status.PROJECT_ALREADY_EXISTS); - logger.info(map.toString()); } - void putMsgWithParams(Map result, Status status,Object ... 
statusParams) { result.put(Constants.STATUS, status); - if(statusParams != null && statusParams.length > 0){ + if (statusParams != null && statusParams.length > 0) { result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); - }else { + } else { result.put(Constants.MSG, status.getMsg()); } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java index 3952a25542..bd8aa72fef 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; @@ -35,6 +36,9 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * logger service test + */ @RunWith(MockitoJUnitRunner.class) @PrepareForTest({LoggerServiceImpl.class}) public class LoggerServiceTest { @@ -43,6 +47,7 @@ public class LoggerServiceTest { @InjectMocks private LoggerServiceImpl loggerService; + @Mock private ProcessService processService; @@ -51,7 +56,6 @@ public class LoggerServiceTest { this.loggerService.init(); } - @Test public void testQueryDataSourceList() { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java index b155d5959a..c7c53fff74 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java @@ -14,15 +14,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.MonitorServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.MonitorDBDao; import org.apache.dolphinscheduler.dao.entity.MonitorRecord; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -33,21 +40,20 @@ import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - +/** + * monitor service test + */ @RunWith(MockitoJUnitRunner.class) public class MonitorServiceTest { private static final Logger logger = LoggerFactory.getLogger(MonitorServiceTest.class); @InjectMocks - private MonitorService monitorService; + private MonitorServiceImpl monitorService; + @Mock private MonitorDBDao monitorDBDao; - @Test public void testQueryDatabaseState(){ diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java index 7ebb40ecb0..a4b3527e40 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java @@ -85,6 
+85,9 @@ import org.springframework.web.multipart.MultipartFile; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * process definition service test + */ @RunWith(MockitoJUnitRunner.class) public class ProcessDefinitionServiceTest { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java index 5a8d09fdc5..1286877dad 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java @@ -45,6 +45,9 @@ import org.mockito.junit.MockitoJUnitRunner; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.google.common.collect.Lists; +/** + * process definition version service test + */ @RunWith(MockitoJUnitRunner.class) public class ProcessDefinitionVersionServiceTest { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java index de23d7570e..3ac5c69dcd 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java @@ -22,6 +22,7 @@ import static org.mockito.Mockito.when; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.LoggerServiceImpl; +import org.apache.dolphinscheduler.api.service.impl.ProcessInstanceServiceImpl; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import 
org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -64,11 +65,14 @@ import org.mockito.junit.MockitoJUnitRunner; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * process instance service test + */ @RunWith(MockitoJUnitRunner.Silent.class) public class ProcessInstanceServiceTest { @InjectMocks - ProcessInstanceService processInstanceService; + ProcessInstanceServiceImpl processInstanceService; @Mock ProjectMapper projectMapper; @@ -91,9 +95,6 @@ public class ProcessInstanceServiceTest { @Mock ProcessDefinitionVersionService processDefinitionVersionService; - @Mock - ExecutorService execService; - @Mock TaskInstanceMapper taskInstanceMapper; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java index 076eb59b27..3cdf7142e6 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java @@ -50,6 +50,9 @@ import org.slf4j.LoggerFactory; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * project service test + **/ @RunWith(MockitoJUnitRunner.class) public class ProjectServiceTest { @@ -67,7 +70,6 @@ public class ProjectServiceTest { @Mock private ProcessDefinitionMapper processDefinitionMapper; - private String projectName = "ProjectServiceTest"; private String userName = "ProjectServiceTest"; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/QueueServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/QueueServiceTest.java index dbae95b181..10c6d486e7 100644 --- 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/QueueServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/QueueServiceTest.java @@ -14,11 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.QueueServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -28,6 +28,11 @@ import org.apache.dolphinscheduler.dao.entity.Queue; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.QueueMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -40,28 +45,32 @@ import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * queue service test + */ @RunWith(MockitoJUnitRunner.class) public class QueueServiceTest { private static final Logger logger = LoggerFactory.getLogger(QueueServiceTest.class); @InjectMocks - private QueueService queueService; + private QueueServiceImpl queueService; + @Mock private QueueMapper queueMapper; + @Mock private UserMapper userMapper; + private String queueName = "QueueServiceTest"; @Before public void setUp() { } - @After public void after(){ } diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java index d430d3a755..8478b1b2bc 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -40,6 +41,7 @@ import org.apache.dolphinscheduler.dao.mapper.UserMapper; import java.io.IOException; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -61,29 +63,40 @@ import org.springframework.mock.web.MockMultipartFile; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * resources service test + */ @RunWith(PowerMockRunner.class) @PowerMockIgnore({"sun.security.*", "javax.net.*"}) @PrepareForTest({HadoopUtils.class, PropertyUtils.class, FileUtils.class, org.apache.dolphinscheduler.api.utils.FileUtils.class}) public class ResourcesServiceTest { + private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceTest.class); @InjectMocks - private ResourcesService resourcesService; + private ResourcesServiceImpl resourcesService; + @Mock private ResourceMapper resourcesMapper; + @Mock private TenantMapper tenantMapper; - @Mock - private ResourceUserMapper resourceUserMapper; + @Mock private HadoopUtils hadoopUtils; + @Mock private UserMapper userMapper; + @Mock private UdfFuncMapper udfFunctionMapper; + @Mock private 
ProcessDefinitionMapper processDefinitionMapper; + @Mock + private ResourceUserMapper resourceUserMapper; + @Before public void setUp() { @@ -313,6 +326,9 @@ public class ResourcesServiceTest { //SUCCESS loginUser.setTenantId(1); Mockito.when(hadoopUtils.delete(Mockito.anyString(), Mockito.anyBoolean())).thenReturn(true); + Mockito.when(processDefinitionMapper.listResources()).thenReturn(getResources()); + Mockito.when(resourcesMapper.deleteIds(Mockito.any())).thenReturn(1); + Mockito.when(resourceUserMapper.deleteResourceUserArray(Mockito.anyInt(), Mockito.any())).thenReturn(1); result = resourcesService.delete(loginUser, 1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); @@ -687,4 +703,13 @@ public class ResourcesServiceTest { contentList.add("test"); return contentList; } + + private List> getResources() { + List> resources = new ArrayList<>(); + Map resource = new HashMap<>(); + resource.put("id", 1); + resource.put("resource_ids", "1"); + resources.add(resource); + return resources; + } } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java index deadc2129c..49efc15694 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java @@ -14,10 +14,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; +import org.apache.dolphinscheduler.api.service.impl.SchedulerServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.model.Server; @@ -46,14 +48,15 @@ import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +/** + * scheduler service test + */ @RunWith(PowerMockRunner.class) @PrepareForTest(QuartzExecutors.class) - public class SchedulerServiceTest { - @InjectMocks - private SchedulerService schedulerService; + private SchedulerServiceImpl schedulerService; @Mock private MonitorService monitorService; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java index b51f85f456..4a950cd33e 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java @@ -14,12 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.service; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Date; -import java.util.List; +package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.service.impl.SessionServiceImpl; import org.apache.dolphinscheduler.common.Constants; @@ -29,6 +25,12 @@ import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.Session; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.SessionMapper; + +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.List; + import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -43,7 +45,9 @@ import org.slf4j.LoggerFactory; import org.springframework.mock.web.MockCookie; import org.springframework.mock.web.MockHttpServletRequest; - +/** + * session service test + */ @RunWith(MockitoJUnitRunner.class) public class SessionServiceTest { @@ -61,7 +65,6 @@ public class SessionServiceTest { public void setUp() { } - @After public void after(){ } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java index b1989b4e31..a746e6294f 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java @@ -24,6 +24,7 @@ import static org.mockito.Mockito.when; import org.apache.dolphinscheduler.api.ApiApplicationServer; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; +import org.apache.dolphinscheduler.api.service.impl.TaskInstanceServiceImpl; import org.apache.dolphinscheduler.common.Constants; import 
org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.UserType; @@ -50,19 +51,19 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.boot.test.context.SpringBootTest; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * task instance service test + */ @RunWith(MockitoJUnitRunner.Silent.class) @SpringBootTest(classes = ApiApplicationServer.class) public class TaskInstanceServiceTest { - private static final Logger logger = LoggerFactory.getLogger(TaskInstanceServiceTest.class); @InjectMocks - private TaskInstanceService taskInstanceService; + private TaskInstanceServiceImpl taskInstanceService; @Mock ProjectMapper projectMapper; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java index 2fea76d93f..bdd38df421 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java @@ -46,13 +46,16 @@ import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.context.i18n.LocaleContextHolder; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * tenant service test + */ @RunWith(MockitoJUnitRunner.class) public class TenantServiceTest { + private static final Logger logger = LoggerFactory.getLogger(TenantServiceTest.class); @InjectMocks diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java index 884e9b6b36..0f41848244 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java @@ -14,11 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.UdfFuncServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -32,6 +32,12 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -45,26 +51,30 @@ import org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Map; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * udf func service test + */ @RunWith(PowerMockRunner.class) @PrepareForTest(PropertyUtils.class) public class UdfFuncServiceTest { + private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceTest.class); @InjectMocks - private UdfFuncService udfFuncService; 
+ private UdfFuncServiceImpl udfFuncService; + @Mock private ResourceMapper resourceMapper; + @Mock private UdfFuncMapper udfFuncMapper; + @Mock private UDFUserMapper udfUserMapper; - @Before public void setUp() { PowerMockito.mockStatic(PropertyUtils.class); @@ -162,6 +172,8 @@ public class UdfFuncServiceTest { @Test public void testDelete(){ + Mockito.when(udfFuncMapper.deleteById(Mockito.anyInt())).thenReturn(1); + Mockito.when(udfUserMapper.deleteByUdfFuncId(Mockito.anyInt())).thenReturn(1); Result result= udfFuncService.delete(122); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UiPluginServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UiPluginServiceTest.java index 95e38b2ab9..e1f44583f0 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UiPluginServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UiPluginServiceTest.java @@ -36,7 +36,7 @@ import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; /** - * UiPluginServiceTest + * ui plugin service test */ @RunWith(MockitoJUnitRunner.class) public class UiPluginServiceTest { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java index ca6c7216b9..39b8cda3d4 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java @@ -22,6 +22,7 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.UsersServiceImpl; 
import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -29,6 +30,7 @@ import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.EncryptionUtils; +import org.apache.dolphinscheduler.dao.entity.AlertGroup; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; @@ -60,28 +62,40 @@ import org.slf4j.LoggerFactory; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +/** + * users service test + */ @RunWith(MockitoJUnitRunner.class) public class UsersServiceTest { + private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class); @InjectMocks - private UsersService usersService; + private UsersServiceImpl usersService; + @Mock private UserMapper userMapper; + @Mock private TenantMapper tenantMapper; + @Mock - private ProjectUserMapper projectUserMapper; - @Mock - private ResourceUserMapper resourcesUserMapper; + private ResourceMapper resourceMapper; + @Mock - private UDFUserMapper udfUserMapper; + private AlertGroupMapper alertGroupMapper; + @Mock private DataSourceUserMapper datasourceUserMapper; + @Mock - private AlertGroupMapper alertGroupMapper; + private ProjectUserMapper projectUserMapper; + @Mock - private ResourceMapper resourceMapper; + private ResourceUserMapper resourceUserMapper; + + @Mock + private UDFUserMapper udfUserMapper; private String queueName = "UsersServiceTestQueue"; @@ -291,6 +305,7 @@ public class UsersServiceTest { logger.info(result.toString()); Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); //success + 
when(projectUserMapper.deleteProjectRelation(Mockito.anyInt(), Mockito.anyInt())).thenReturn(1); result = usersService.grantProject(loginUser, 1, projectIds); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); @@ -311,8 +326,8 @@ public class UsersServiceTest { Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); //success when(resourceMapper.queryAuthorizedResourceList(1)).thenReturn(new ArrayList()); - when(resourceMapper.selectById(Mockito.anyInt())).thenReturn(getResource()); + when(resourceUserMapper.deleteResourceUser(1, 0)).thenReturn(1); result = usersService.grantResources(loginUser, 1, resourceIds); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); @@ -334,6 +349,7 @@ public class UsersServiceTest { logger.info(result.toString()); Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); //success + when(udfUserMapper.deleteByUserId(1)).thenReturn(1); result = usersService.grantUDFFunction(loginUser, 1, udfIds); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); @@ -353,6 +369,7 @@ public class UsersServiceTest { logger.info(result.toString()); Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); //success + when(datasourceUserMapper.deleteByUserId(Mockito.anyInt())).thenReturn(1); result = usersService.grantDataSource(loginUser, 1, datasourceIds); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); @@ -383,6 +400,7 @@ public class UsersServiceTest { loginUser.setUserType(null); loginUser.setId(1); when(userMapper.queryDetailsById(1)).thenReturn(getGeneralUser()); + when(alertGroupMapper.queryByUserId(1)).thenReturn(getAlertGroups()); result = usersService.getUserInfo(loginUser); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); @@ -635,4 +653,11 @@ public class 
UsersServiceTest { return resource; } + private List getAlertGroups() { + List alertGroups = new ArrayList<>(); + AlertGroup alertGroup = new AlertGroup(); + alertGroups.add(alertGroup); + return alertGroups; + } + } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageServiceTest.java index 999e079bf5..f07ebac7d7 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageServiceTest.java @@ -14,13 +14,23 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.service.impl.WorkFlowLineageServiceImpl; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.EncryptionUtils; import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage; import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation; import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -28,15 +38,14 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; -import java.util.*; - -import static org.mockito.Mockito.when; - +/** + * work flow lineage service test + */ @RunWith(MockitoJUnitRunner.class) public class WorkFlowLineageServiceTest { @InjectMocks - private WorkFlowLineageService workFlowLineageService; + private WorkFlowLineageServiceImpl 
workFlowLineageService; @Mock private WorkFlowLineageMapper workFlowLineageMapper; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java index 4a1d874c8a..db9bb4fb00 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.service; +import org.apache.dolphinscheduler.api.service.impl.WorkerGroupServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; @@ -40,16 +41,15 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +/** + * worker group service test + */ @RunWith(MockitoJUnitRunner.class) public class WorkerGroupServiceTest { - private static final Logger logger = LoggerFactory.getLogger(WorkerGroupServiceTest.class); - @InjectMocks - private WorkerGroupService workerGroupService; + private WorkerGroupServiceImpl workerGroupService; @Mock private ProcessInstanceMapper processInstanceMapper; @@ -71,8 +71,8 @@ public class WorkerGroupServiceTest { Mockito.when(zookeeperCachedOperator.getChildrenKeys(workerPath)).thenReturn(workerGroupStrList); List defaultIpList = new ArrayList<>(); - defaultIpList.add("192.168.220.188:1234"); - defaultIpList.add("192.168.220.189:1234"); + defaultIpList.add("192.168.220.188:1234:100:1234567"); + defaultIpList.add("192.168.220.189:1234:100:1234567"); Mockito.when(zookeeperCachedOperator.getChildrenKeys(workerPath + "/default")).thenReturn(defaultIpList); diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegexUtilsTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegexUtilsTest.java index 5b62d51b07..7ccfdfdb38 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegexUtilsTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegexUtilsTest.java @@ -36,4 +36,19 @@ public class RegexUtilsTest { Assert.assertFalse(numeric2); } + @Test + public void testEscapeNRT() { + String result1 = RegexUtils.escapeNRT("abc\n"); + Assert.assertEquals("abc_", result1); + + String result2 = RegexUtils.escapeNRT("abc\r"); + Assert.assertEquals("abc_", result2); + + String result3 = RegexUtils.escapeNRT("abc\t"); + Assert.assertEquals("abc_", result3); + + String result4 = RegexUtils.escapeNRT(null); + Assert.assertNull(result4); + } + } \ No newline at end of file diff --git a/dolphinscheduler-common/pom.xml b/dolphinscheduler-common/pom.xml index 8aeca37e93..3d21cc62e1 100644 --- a/dolphinscheduler-common/pom.xml +++ b/dolphinscheduler-common/pom.xml @@ -17,594 +17,594 @@ --> - 4.0.0 - - org.apache.dolphinscheduler - dolphinscheduler - 1.3.4-SNAPSHOT - - dolphinscheduler-common - dolphinscheduler-common + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler + ${revision} + + dolphinscheduler-common + dolphinscheduler-common - jar - - UTF-8 - 3.1.0 - - - - org.apache.httpcomponents - httpclient - + jar + + UTF-8 + 3.1.0 + + + + org.apache.httpcomponents + httpclient + - - junit - junit - test - - - org.mockito - mockito-core - jar - test - + + junit + junit + test + + + org.mockito + mockito-core + jar + test + - - org.powermock - powermock-module-junit4 - test - + + org.powermock + powermock-module-junit4 + test + - - org.powermock - powermock-api-mockito2 - test - - - org.mockito - mockito-core - - - + + org.powermock + powermock-api-mockito2 + test + + + org.mockito + mockito-core + + + - - 
commons-configuration - commons-configuration - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-databind - - - org.apache.commons - commons-collections4 - - - commons-beanutils - commons-beanutils - - - org.apache.hadoop - hadoop-common - - - org.slf4j - slf4j-log4j12 - - - jdk.tools - jdk.tools - - - servlet-api - javax.servlet - - - javax.servlet - servlet-api - - - log4j - log4j - - - org.apache.curator - curator-client - + + commons-configuration + commons-configuration + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + org.apache.commons + commons-collections4 + + + commons-beanutils + commons-beanutils + + + org.apache.hadoop + hadoop-common + + + org.slf4j + slf4j-log4j12 + + + jdk.tools + jdk.tools + + + servlet-api + javax.servlet + + + javax.servlet + servlet-api + + + log4j + log4j + + + org.apache.curator + curator-client + - - commons-configuration - commons-configuration - - - io.grpc - grpc-protobuf - - - io.netty - netty - - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-mapper-asl - - - com.google.protobuf - jackson-mapper-asl - - - com.google.code.gson - gson - - - org.apache.commons - commons-math3 - - - xmlenc - xmlenc - - - commons-net - commons-net - - - org.apache.avro - avro - - - org.apache.zookeeper - zookeeper - - - jsr305 - com.google.code.findbugs - - - javax.servlet.jsp - jsp-api - - - jersey-json - com.sun.jersey - - - jersey-server - com.sun.jersey - - - jersey-core - com.sun.jersey - - - xz - org.tukaani - - - - - org.apache.hadoop - hadoop-client - - - org.slf4j - slf4j-log4j12 - - - servlet-api - javax.servlet - - - org.codehaus.jackson - jackson-jaxrs - - - org.codehaus.jackson - jackson-xc - + + commons-configuration + commons-configuration + + + io.grpc + grpc-protobuf + + + io.netty + netty + + + org.codehaus.jackson + jackson-core-asl + + + org.codehaus.jackson + jackson-mapper-asl + + 
+ com.google.protobuf + jackson-mapper-asl + + + com.google.code.gson + gson + + + org.apache.commons + commons-math3 + + + xmlenc + xmlenc + + + commons-net + commons-net + + + org.apache.avro + avro + + + org.apache.zookeeper + zookeeper + + + jsr305 + com.google.code.findbugs + + + javax.servlet.jsp + jsp-api + + + jersey-json + com.sun.jersey + + + jersey-server + com.sun.jersey + + + jersey-core + com.sun.jersey + + + xz + org.tukaani + + + + + org.apache.hadoop + hadoop-client + + + org.slf4j + slf4j-log4j12 + + + servlet-api + javax.servlet + + + org.codehaus.jackson + jackson-jaxrs + + + org.codehaus.jackson + jackson-xc + - - org.fusesource.leveldbjni - leveldbjni-all - - - org.apache.zookeeper - zookeeper - - - org.apache.hadoop - hadoop-mapreduce-client-shuffle - - - jersey-client - com.sun.jersey - - - jersey-core - com.sun.jersey - - - jaxb-api - javax.xml.bind - - - log4j - log4j - - - + + org.fusesource.leveldbjni + leveldbjni-all + + + org.apache.zookeeper + zookeeper + + + org.apache.hadoop + hadoop-mapreduce-client-shuffle + + + jersey-client + com.sun.jersey + + + jersey-core + com.sun.jersey + + + jaxb-api + javax.xml.bind + + + log4j + log4j + + + - - org.apache.hadoop - hadoop-hdfs - - - javax.servlet - servlet-api - - - io.netty - netty - - - com.google.protobuf - protobuf-java - - - xmlenc - xmlenc - - - io.netty - netty-all - - - org.fusesource.leveldbjni - leveldbjni-all - - - jersey-core - com.sun.jersey - - - jersey-server - com.sun.jersey - - - log4j - log4j - - - + + org.apache.hadoop + hadoop-hdfs + + + javax.servlet + servlet-api + + + io.netty + netty + + + com.google.protobuf + protobuf-java + + + xmlenc + xmlenc + + + io.netty + netty-all + + + org.fusesource.leveldbjni + leveldbjni-all + + + jersey-core + com.sun.jersey + + + jersey-server + com.sun.jersey + + + log4j + log4j + + + - - org.apache.hadoop - hadoop-aws - - - org.apache.hadoop - hadoop-common - - - com.fasterxml.jackson.core - jackson-core - - - 
com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-annotations - - - + + org.apache.hadoop + hadoop-aws + + + org.apache.hadoop + hadoop-common + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.core + jackson-annotations + + + - - org.postgresql - postgresql - + + org.postgresql + postgresql + - - org.apache.hive - hive-jdbc - - - slf4j-log4j12 - org.slf4j - - - org.eclipse.jetty.aggregate - jetty-all - + + org.apache.hive + hive-jdbc + + + slf4j-log4j12 + org.slf4j + + + org.eclipse.jetty.aggregate + jetty-all + - - org.apache.ant - ant - - - io.dropwizard.metrics - metrics-json - - - io.dropwizard.metrics - metrics-jvm - - - com.github.joshelser - dropwizard-metrics-hadoop-metrics2-reporter - + + org.apache.ant + ant + + + io.dropwizard.metrics + metrics-json + + + io.dropwizard.metrics + metrics-jvm + + + com.github.joshelser + dropwizard-metrics-hadoop-metrics2-reporter + - - io.netty - netty-all - - - com.google.code.gson - gson - - - com.google.code.findbugs - jsr305 - - - io.dropwizard.metrics - metrics-core - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.commons - commons-compress - - - org.apache.curator - curator-client - - - org.apache.hadoop - hadoop-auth - - - org.apache.hadoop - hadoop-mapreduce-client-core - - - org.apache.hadoop - hadoop-yarn-api - + + io.netty + netty-all + + + com.google.code.gson + gson + + + com.google.code.findbugs + jsr305 + + + io.dropwizard.metrics + metrics-core + + + javax.servlet + servlet-api + + + org.apache.avro + avro + + + org.apache.commons + commons-compress + + + org.apache.curator + curator-client + + + org.apache.hadoop + hadoop-auth + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.apache.hadoop + hadoop-yarn-api + - - org.apache.zookeeper - zookeeper - - - org.codehaus.jackson - jackson-jaxrs - - - org.codehaus.jackson - jackson-xc - - - 
com.google.protobuf - protobuf-java - + + org.apache.zookeeper + zookeeper + + + org.codehaus.jackson + jackson-jaxrs + + + org.codehaus.jackson + jackson-xc + + + com.google.protobuf + protobuf-java + - - org.json - json - - - log4j-slf4j-impl - org.apache.logging.log4j - - - javax.servlet - org.eclipse.jetty.orbit - - - servlet-api-2.5 - org.mortbay.jetty - - - jasper-runtime - tomcat - - - slider-core - org.apache.slider - - - hbase-server - org.apache.hbase - - - jersey-client - com.sun.jersey - - - jersey-core - com.sun.jersey - - - jersey-json - com.sun.jersey - - - jersey-server - com.sun.jersey - - - jersey-guice - com.sun.jersey.contribs - - - hbase-common - org.apache.hbase - - - hbase-hadoop2-compat - org.apache.hbase - - - hbase-client - org.apache.hbase - - - hbase-hadoop-compat - org.apache.hbase - - - tephra-hbase-compat-1.0 - co.cask.tephra - - - jaxb-api - javax.xml.bind - - - hive-llap-client - org.apache.hive - - - hive-llap-common - org.apache.hive - - - hive-llap-server - org.apache.hive - - - tephra-core - co.cask.tephra - - - ant - ant - - - stringtemplate - org.antlr - - - antlr-runtime - org.antlr - - - hive-shims - org.apache.hive - - - jsp-api - javax.servlet - - - log4j-api - org.apache.logging.log4j - - - log4j-core - org.apache.logging.log4j - - - log4j-web - org.apache.logging.log4j - - - jasper-compiler - tomcat - - - + + org.json + json + + + log4j-slf4j-impl + org.apache.logging.log4j + + + javax.servlet + org.eclipse.jetty.orbit + + + servlet-api-2.5 + org.mortbay.jetty + + + jasper-runtime + tomcat + + + slider-core + org.apache.slider + + + hbase-server + org.apache.hbase + + + jersey-client + com.sun.jersey + + + jersey-core + com.sun.jersey + + + jersey-json + com.sun.jersey + + + jersey-server + com.sun.jersey + + + jersey-guice + com.sun.jersey.contribs + + + hbase-common + org.apache.hbase + + + hbase-hadoop2-compat + org.apache.hbase + + + hbase-client + org.apache.hbase + + + hbase-hadoop-compat + org.apache.hbase + + + 
tephra-hbase-compat-1.0 + co.cask.tephra + + + jaxb-api + javax.xml.bind + + + hive-llap-client + org.apache.hive + + + hive-llap-common + org.apache.hive + + + hive-llap-server + org.apache.hive + + + tephra-core + co.cask.tephra + + + ant + ant + + + stringtemplate + org.antlr + + + antlr-runtime + org.antlr + + + hive-shims + org.apache.hive + + + jsp-api + javax.servlet + + + log4j-api + org.apache.logging.log4j + + + log4j-core + org.apache.logging.log4j + + + log4j-web + org.apache.logging.log4j + + + jasper-compiler + tomcat + + + - - ch.qos.logback - logback-classic - - - ch.qos.logback - logback-core - - - com.github.oshi - oshi-core - + + ch.qos.logback + logback-classic + + + ch.qos.logback + logback-core + + + com.github.oshi + oshi-core + - - ru.yandex.clickhouse - clickhouse-jdbc - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - - - jaxb-api - javax.xml.bind - - - + + ru.yandex.clickhouse + clickhouse-jdbc + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + jaxb-api + javax.xml.bind + + + - - com.microsoft.sqlserver - mssql-jdbc - - - azure-keyvault - com.microsoft.azure - - - + + com.microsoft.sqlserver + mssql-jdbc + + + azure-keyvault + com.microsoft.azure + + + - - com.facebook.presto - presto-jdbc - + + com.facebook.presto + presto-jdbc + com.baomidou mybatis-plus-annotation - ${mybatis-plus.version} - compile + ${mybatis-plus.version} + compile - - org.codehaus.janino - janino - ${codehaus.janino.version} - - - com.github.rholder - guava-retrying - - - com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - - + + org.codehaus.janino + janino + ${codehaus.janino.version} + + + com.github.rholder + guava-retrying + + + com.google.guava + guava + + + com.google.code.findbugs + jsr305 + + + + diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java index 45af3b2700..0297ae6534 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java @@ -752,9 +752,17 @@ public final class Constants { public static final String SUBTRACT_STRING = "-"; public static final String GLOBAL_PARAMS = "globalParams"; public static final String LOCAL_PARAMS = "localParams"; + public static final String LOCAL_PARAMS_LIST = "localParamsList"; + public static final String SUBPROCESS_INSTANCE_ID = "subProcessInstanceId"; public static final String PROCESS_INSTANCE_STATE = "processInstanceState"; + public static final String PARENT_WORKFLOW_INSTANCE = "parentWorkflowInstance"; + public static final String TASK_TYPE = "taskType"; public static final String TASK_LIST = "taskList"; public static final String RWXR_XR_X = "rwxr-xr-x"; + public static final String QUEUE = "queue"; + public static final String QUEUE_NAME = "queueName"; + public static final int LOG_QUERY_SKIP_LINE_NUMBER = 0; + public static final int LOG_QUERY_LIMIT = 4096; /** * master/worker server use for zk @@ -898,6 +906,12 @@ public final class Constants { public static final String TOTAL = "total"; + /** + * workflow + */ + public static final String WORKFLOW_LIST = "workFlowList"; + public static final String WORKFLOW_RELATION_LIST = "workFlowRelationList"; + /** * session user */ @@ -1004,11 +1018,13 @@ public final class Constants { */ public static final String PLUGIN_JAR_SUFFIX = ".jar"; - public static final int NORAML_NODE_STATUS = 0; + public static final int NORMAL_NODE_STATUS = 0; public static final int ABNORMAL_NODE_STATUS = 1; public static final String START_TIME = "start time"; public static final String END_TIME = "end time"; + public static final String START_END_DATE = "startDate,endDate"; + /** * system line separator */ diff --git 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java index a5312995ac..c484dc0442 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java @@ -259,6 +259,9 @@ public class DateUtils { * @return format time */ public static String format2Duration(Date d1, Date d2) { + if (d1 == null || d2 == null) { + return null; + } return format2Duration(differMs(d1, d2)); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java index 6c761f3d00..df4fb98171 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java @@ -72,6 +72,17 @@ public class NetUtils { return getAddr(getHost(), port); } + /** + * get host + * @return host + */ + public static String getHost(InetAddress inetAddress) { + if (inetAddress != null) { + return Constants.KUBERNETES_MODE ? inetAddress.getHostName() : inetAddress.getHostAddress(); + } + return null; + } + public static String getHost() { if (HOST_ADDRESS != null) { return HOST_ADDRESS; @@ -79,7 +90,7 @@ public class NetUtils { InetAddress address = getLocalAddress(); if (address != null) { - HOST_ADDRESS = Constants.KUBERNETES_MODE ? address.getHostName() : address.getHostAddress(); + HOST_ADDRESS = getHost(address); return HOST_ADDRESS; } return Constants.KUBERNETES_MODE ? 
"localhost" : "127.0.0.1"; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java index 9edf7939db..0ca80b1231 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java @@ -73,7 +73,7 @@ public class PropertyUtils { /** * @return judge whether resource upload startup */ - public static Boolean getResUploadStartupState() { + public static boolean getResUploadStartupState() { String resUploadStartupType = PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE); ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); return resUploadType == ResUploadType.HDFS || resUploadType == ResUploadType.S3; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java index 6bed928e14..f506f76627 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java @@ -50,14 +50,6 @@ public class StringUtils { return !isBlank(s); } - public static String replaceNRTtoUnderline(String src) { - if (isBlank(src)) { - return src; - } else { - return src.replaceAll("[\n|\r|\t]", "_"); - } - } - public static String trim(String str) { return str == null ? 
null : str.trim(); } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DateUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DateUtilsTest.java index 63f0be5906..4a88085d19 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DateUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DateUtilsTest.java @@ -153,7 +153,7 @@ public class DateUtilsTest { @Test public void getCurrentTimeStamp() { - String timeStamp = DateUtils.getCurrentTimeStamp(); + String timeStamp = DateUtils.getCurrentTimeStamp(); Assert.assertNotNull(timeStamp); } @@ -196,4 +196,12 @@ public class DateUtilsTest { } + @Test + public void testNullDuration() { + // days hours minutes seconds + Date d1 = DateUtils.stringToDate("2020-01-20 11:00:00"); + Date d2 = null; + Assert.assertNull(DateUtils.format2Duration(d1, d2)); + } + } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringUtilsTest.java index 74b96aff02..3f5aeda3f9 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringUtilsTest.java @@ -63,21 +63,6 @@ public class StringUtilsTest { Assert.assertTrue(b); } - @Test - public void testreplaceNRTtoUnderline() { - String result1 = StringUtils.replaceNRTtoUnderline("abc\n"); - Assert.assertEquals("abc_", result1); - - String result2 = StringUtils.replaceNRTtoUnderline("abc\r"); - Assert.assertEquals("abc_", result2); - - String result3 = StringUtils.replaceNRTtoUnderline("abc\t"); - Assert.assertEquals("abc_", result3); - - String result4 = StringUtils.replaceNRTtoUnderline(null); - Assert.assertNull(result4); - } - @Test public void 
testTrim() { String trim = StringUtils.trim(null); diff --git a/dolphinscheduler-dao/pom.xml b/dolphinscheduler-dao/pom.xml index 6bdaf416d9..4fff6575dc 100644 --- a/dolphinscheduler-dao/pom.xml +++ b/dolphinscheduler-dao/pom.xml @@ -17,139 +17,139 @@ --> - 4.0.0 - - org.apache.dolphinscheduler - dolphinscheduler - 1.3.4-SNAPSHOT - - dolphinscheduler-dao - ${project.artifactId} + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler + ${revision} + + dolphinscheduler-dao + ${project.artifactId} - - UTF-8 - - - - junit - junit - test - - - com.baomidou - mybatis-plus - ${mybatis-plus.version} - - - com.baomidou - mybatis-plus-boot-starter - ${mybatis-plus.version} - - - org.apache.logging.log4j - log4j-to-slf4j - - - - - org.postgresql - postgresql - - - - org.springframework.boot - spring-boot-starter-test - test - - - org.ow2.asm - asm - - - org.springframework.boot - spring-boot - - - org.springframework.boot - spring-boot-autoconfigure - - - log4j-api - org.apache.logging.log4j - - - org.springframework.boot - spring-boot-starter-tomcat - - - org.apache.logging.log4j - log4j-to-slf4j - - - + + UTF-8 + + + + junit + junit + test + + + com.baomidou + mybatis-plus + ${mybatis-plus.version} + + + com.baomidou + mybatis-plus-boot-starter + ${mybatis-plus.version} + + + org.apache.logging.log4j + log4j-to-slf4j + + + + + org.postgresql + postgresql + - - mysql - mysql-connector-java - - - com.h2database - h2 - - - com.alibaba - druid - + + org.springframework.boot + spring-boot-starter-test + test + + + org.ow2.asm + asm + + + org.springframework.boot + spring-boot + + + org.springframework.boot + spring-boot-autoconfigure + + + log4j-api + org.apache.logging.log4j + + + org.springframework.boot + spring-boot-starter-tomcat + + + org.apache.logging.log4j + log4j-to-slf4j + + + - - ch.qos.logback - logback-classic - - - com.fasterxml.jackson.core - 
jackson-annotations - - - com.fasterxml.jackson.core - jackson-databind - - - org.apache.httpcomponents - httpclient - - - commons-httpclient - commons-httpclient - + + mysql + mysql-connector-java + + + com.h2database + h2 + + + com.alibaba + druid + - - com.cronutils - cron-utils - + + ch.qos.logback + logback-classic + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + org.apache.httpcomponents + httpclient + + + commons-httpclient + commons-httpclient + + + + com.cronutils + cron-utils + commons-configuration commons-configuration - - org.apache.dolphinscheduler - dolphinscheduler-common - - - protobuf-java - com.google.protobuf - - - + + org.apache.dolphinscheduler + dolphinscheduler-common + + + protobuf-java + com.google.protobuf + + + org.springframework spring-test test - - org.yaml - snakeyaml - - + + org.yaml + snakeyaml + + diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java index b334603a1a..fd0fba9e57 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java @@ -35,7 +35,6 @@ public class ResourceProcessDefinitionUtils { Map> result = new HashMap<>(); if (CollectionUtils.isNotEmpty(list)) { for (Map tempMap : list) { - map.put((Integer) tempMap.get("id"), (String)tempMap.get("resource_ids")); } } diff --git a/dolphinscheduler-dist/pom.xml b/dolphinscheduler-dist/pom.xml index 1ec74fd409..d70bbf7491 100644 --- a/dolphinscheduler-dist/pom.xml +++ b/dolphinscheduler-dist/pom.xml @@ -20,7 +20,7 @@ dolphinscheduler org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff --git a/dolphinscheduler-microbench/pom.xml 
b/dolphinscheduler-microbench/pom.xml index 606ecd3c38..7635eae18d 100644 --- a/dolphinscheduler-microbench/pom.xml +++ b/dolphinscheduler-microbench/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff --git a/dolphinscheduler-remote/pom.xml b/dolphinscheduler-remote/pom.xml index 3ac7b914a5..a63f3f955c 100644 --- a/dolphinscheduler-remote/pom.xml +++ b/dolphinscheduler-remote/pom.xml @@ -20,7 +20,7 @@ dolphinscheduler org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java index a1ffb8768e..239a3993c0 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.remote.utils; +import org.apache.dolphinscheduler.common.utils.NetUtils; + import java.net.InetSocketAddress; import io.netty.channel.Channel; @@ -37,7 +39,7 @@ public class ChannelUtils { * @return local address */ public static String getLocalAddress(Channel channel) { - return ((InetSocketAddress) channel.localAddress()).getAddress().getHostAddress(); + return NetUtils.getHost(((InetSocketAddress) channel.localAddress()).getAddress()); } /** @@ -47,7 +49,7 @@ public class ChannelUtils { * @return remote address */ public static String getRemoteAddress(Channel channel) { - return ((InetSocketAddress) channel.remoteAddress()).getAddress().getHostAddress(); + return NetUtils.getHost(((InetSocketAddress) channel.remoteAddress()).getAddress()); } /** @@ -58,7 +60,7 @@ public class ChannelUtils { */ public static Host toAddress(Channel channel) { InetSocketAddress socketAddress = ((InetSocketAddress) channel.remoteAddress()); - return 
new Host(socketAddress.getAddress().getHostAddress(), socketAddress.getPort()); + return new Host(NetUtils.getHost(socketAddress.getAddress()), socketAddress.getPort()); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java index c18d02f09a..7e42984e49 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java @@ -17,8 +17,11 @@ package org.apache.dolphinscheduler.remote.utils; +import static org.apache.dolphinscheduler.common.Constants.COLON; + import java.io.Serializable; import java.util.Objects; +import java.util.StringJoiner; /** * server address @@ -61,13 +64,13 @@ public class Host implements Serializable { public Host(String ip, int port) { this.ip = ip; this.port = port; - this.address = ip + ":" + port; + this.address = ip + COLON + port; } public Host(String ip, int port, int weight, long startTime) { this.ip = ip; this.port = port; - this.address = ip + ":" + port; + this.address = ip + COLON + port; this.weight = getWarmUpWeight(weight, startTime); this.startTime = startTime; } @@ -75,7 +78,7 @@ public class Host implements Serializable { public Host(String ip, int port, int weight, long startTime, String workGroup) { this.ip = ip; this.port = port; - this.address = ip + ":" + port; + this.address = ip + COLON + port; this.weight = getWarmUpWeight(weight, startTime); this.workGroup = workGroup; this.startTime = startTime; @@ -95,7 +98,7 @@ public class Host implements Serializable { public void setIp(String ip) { this.ip = ip; - this.address = ip + ":" + port; + this.address = ip + COLON + port; } public int getWeight() { @@ -120,7 +123,7 @@ public class Host implements Serializable { public void setPort(int port) { this.port = port; - this.address = ip + ":" + port; + 
this.address = ip + COLON + port; } public String getWorkGroup() { @@ -141,7 +144,7 @@ public class Host implements Serializable { if (address == null) { throw new IllegalArgumentException("Host : address is null."); } - String[] parts = address.split(":"); + String[] parts = address.split(COLON); if (parts.length < 2) { throw new IllegalArgumentException(String.format("Host : %s illegal.", address)); } @@ -155,6 +158,21 @@ public class Host implements Serializable { return host; } + /** + * generate host string + * @param address address + * @param weight weight + * @param startTime startTime + * @return address:weight:startTime + */ + public static String generate(String address, int weight, long startTime) { + StringJoiner stringJoiner = new StringJoiner(COLON); + stringJoiner.add(address) + .add(String.valueOf(weight)) + .add(String.valueOf(startTime)); + return stringJoiner.toString(); + } + /** * whether old version * @@ -162,7 +180,7 @@ public class Host implements Serializable { * @return old version is true , otherwise is false */ public static Boolean isOldVersion(String address) { - String[] parts = address.split(":"); + String[] parts = address.split(COLON); return parts.length != 2 && parts.length != 3; } @@ -186,8 +204,11 @@ public class Host implements Serializable { @Override public String toString() { return "Host{" - + "address='" + address + '\'' - + '}'; + + "address='" + address + '\'' + + ", weight=" + weight + + ", startTime=" + startTime + + ", workGroup='" + workGroup + '\'' + + '}'; } /** @@ -201,4 +222,13 @@ public class Host implements Serializable { } return weight; } + + /** + * get address and weight + * + * @return address:weight + */ + public String getAddressAndWeight() { + return address + COLON + weight; + } } diff --git a/dolphinscheduler-server/pom.xml b/dolphinscheduler-server/pom.xml index 9655b290c5..793f72fde6 100644 --- a/dolphinscheduler-server/pom.xml +++ b/dolphinscheduler-server/pom.xml @@ -22,7 +22,7 @@ 
org.apache.dolphinscheduler dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} dolphinscheduler-server dolphinscheduler-server diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/HeartBeatTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/HeartBeatTask.java index bd8c79cce9..b89d85126f 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/HeartBeatTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/HeartBeatTask.java @@ -19,12 +19,13 @@ package org.apache.dolphinscheduler.server.registry; import static org.apache.dolphinscheduler.remote.utils.Constants.COMMA; -import java.util.Date; -import java.util.Set; - import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; + +import java.util.Date; +import java.util.Set; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,7 +57,7 @@ public class HeartBeatTask extends Thread { double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); double loadAverage = OSUtils.loadAverage(); - int status = Constants.NORAML_NODE_STATUS; + int status = Constants.NORMAL_NODE_STATUS; if (availablePhysicalMemorySize < reservedMemory || loadAverage > maxCpuloadAvg) { diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java index e779d5deb3..3d4d73f51a 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java @@ -17,13 +17,13 @@ package org.apache.dolphinscheduler.server.worker.registry; 
-import static org.apache.dolphinscheduler.common.Constants.COLON; import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; import static org.apache.dolphinscheduler.common.Constants.SLASH; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; import org.apache.dolphinscheduler.server.registry.HeartBeatTask; import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; @@ -146,8 +146,8 @@ public class WorkerRegistry { String address = getLocalAddress(); String workerZkPathPrefix = this.zookeeperRegistryCenter.getWorkerPath(); - String weight = getWorkerWeight(); - String workerStartTime = COLON + System.currentTimeMillis(); + int weight = workerConfig.getWeight(); + long workerStartTime = System.currentTimeMillis(); for (String workGroup : this.workerGroups) { StringBuilder workerZkPathBuilder = new StringBuilder(100); @@ -157,9 +157,7 @@ public class WorkerRegistry { } // trim and lower case is need workerZkPathBuilder.append(workGroup.trim().toLowerCase()).append(SLASH); - workerZkPathBuilder.append(address); - workerZkPathBuilder.append(weight); - workerZkPathBuilder.append(workerStartTime); + workerZkPathBuilder.append(Host.generate(address, weight, workerStartTime)); workerZkPaths.add(workerZkPathBuilder.toString()); } return workerZkPaths; @@ -172,11 +170,4 @@ public class WorkerRegistry { return NetUtils.getAddr(workerConfig.getListenPort()); } - /** - * get Worker Weight - */ - private String getWorkerWeight() { - return COLON + workerConfig.getWeight(); - } - } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java index 
f6d4d0d4bb..37484dafa5 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java @@ -14,12 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.zk; -import org.apache.commons.lang.StringUtils; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.cache.TreeCacheEvent; -import org.apache.curator.framework.recipes.locks.InterProcessMutex; +import static org.apache.dolphinscheduler.common.Constants.SLEEP_TIME_MILLIS; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; @@ -27,24 +25,27 @@ import org.apache.dolphinscheduler.common.enums.ZKNodeType; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.zk.AbstractZKClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.recipes.cache.TreeCacheEvent; +import 
org.apache.curator.framework.recipes.locks.InterProcessMutex; import java.util.Date; import java.util.List; -import static org.apache.dolphinscheduler.common.Constants.SLEEP_TIME_MILLIS; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; /** * zookeeper master client @@ -134,9 +135,9 @@ public class ZKMasterClient extends AbstractZKClient { mutex.acquire(); String serverHost = null; - if(StringUtils.isNotEmpty(path)){ + if (StringUtils.isNotEmpty(path)) { serverHost = getHostByEventDataPath(path); - if(StringUtils.isEmpty(serverHost)){ + if (StringUtils.isEmpty(serverHost)) { logger.error("server down error: unknown path: {}", path); return; } @@ -305,8 +306,8 @@ public class ZKMasterClient extends AbstractZKClient { * @throws Exception exception */ private void failoverWorker(String workerHost, boolean needCheckWorkerAlive) throws Exception { + workerHost = Host.of(workerHost).getAddress(); logger.info("start worker[{}] failover ...", workerHost); - List needFailoverTaskInstanceList = processService.queryNeedFailoverTaskInstances(workerHost); for (TaskInstance taskInstance : needFailoverTaskInstanceList) { if (needCheckWorkerAlive) { diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java index 7763e07314..a180f51576 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.master.registry; import static org.apache.dolphinscheduler.common.Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH; +import 
org.apache.dolphinscheduler.common.utils.NetUtils; import org.apache.dolphinscheduler.remote.utils.Constants; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; @@ -59,7 +60,7 @@ public class MasterRegistryTest { masterRegistry.registry(); String masterPath = zookeeperRegistryCenter.getMasterPath(); TimeUnit.SECONDS.sleep(masterConfig.getMasterHeartbeatInterval() + 2); //wait heartbeat info write into zk node - String masterNodePath = masterPath + "/" + (Constants.LOCAL_ADDRESS + ":" + masterConfig.getListenPort()); + String masterNodePath = masterPath + "/" + (NetUtils.getAddr(Constants.LOCAL_ADDRESS, masterConfig.getListenPort())); String heartbeat = zookeeperRegistryCenter.getZookeeperCachedOperator().get(masterNodePath); Assert.assertEquals(HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH, heartbeat.split(",").length); masterRegistry.unRegistry(); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/HostTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/HostTest.java index 6273569485..80ff11e0be 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/HostTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/HostTest.java @@ -40,4 +40,13 @@ public class HostTest { Host host = Host.of("192.158.2.2:22"); Assert.assertEquals(22, host.getPort()); } + + @Test + public void testGenerate() { + String address = "192.158.2.2:22"; + int weight = 100; + long startTime = System.currentTimeMillis(); + String generateHost = Host.generate(address, weight, startTime); + Assert.assertEquals(address + ":" + weight + ":" + startTime, generateHost); + } } diff --git a/dolphinscheduler-service/pom.xml b/dolphinscheduler-service/pom.xml index fd1d2737de..d0ed415298 100644 --- a/dolphinscheduler-service/pom.xml +++ b/dolphinscheduler-service/pom.xml 
@@ -20,7 +20,7 @@ dolphinscheduler org.apache.dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} 4.0.0 diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java index b9065eca3e..fe6c7fdbde 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java @@ -260,8 +260,8 @@ public class ProcessService { * @param command command * @return create command result */ - public Boolean verifyIsNeedCreateCommand(Command command) { - Boolean isNeedCreate = true; + public boolean verifyIsNeedCreateCommand(Command command) { + boolean isNeedCreate = true; EnumMap cmdTypeMap = new EnumMap<>(CommandType.class); cmdTypeMap.put(CommandType.REPEAT_RUNNING, 1); cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, 1); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java index d7a902550f..5ce42242ee 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java @@ -45,6 +45,7 @@ public class PeerTaskInstancePriorityQueue implements TaskPriorityQueue { diff --git a/dolphinscheduler-spi/pom.xml b/dolphinscheduler-spi/pom.xml index 3f18c48b0e..cde1c71169 100644 --- a/dolphinscheduler-spi/pom.xml +++ b/dolphinscheduler-spi/pom.xml @@ -20,7 +20,7 @@ org.apache.dolphinscheduler dolphinscheduler - 1.3.4-SNAPSHOT + ${revision} dolphinscheduler-spi ${project.artifactId} diff --git 
a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertInfo.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertInfo.java index d6e54561e7..8f61fe2f9d 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertInfo.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertInfo.java @@ -27,7 +27,7 @@ public class AlertInfo { /** * all params this plugin need is in alertProps */ - private Map alertParams; + private Map alertParams; /** * the alert content diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/PluginParamsTransfer.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/PluginParamsTransfer.java index c9b14b9f8e..3e709adfd4 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/PluginParamsTransfer.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/PluginParamsTransfer.java @@ -17,6 +17,9 @@ package org.apache.dolphinscheduler.spi.params; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_FIELD; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_VALUE; + import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.utils.JSONUtils; @@ -24,6 +27,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import com.fasterxml.jackson.core.type.TypeReference; + /** * plugin params pojo and json transfer tool */ @@ -44,8 +49,36 @@ public class PluginParamsTransfer { List pluginParams = transferJsonToParamsList(paramsJsonStr); Map paramsMap = new HashMap<>(); for (PluginParams param : pluginParams) { - paramsMap.put(param.getName(), null != param.getValue() ? param.getValue().toString() : null); + paramsMap.put(param.getName(), param.getValue() == null ? 
null : param.getValue().toString()); } return paramsMap; } + + /** + * generate Plugin Params + * + * @param paramsJsonStr paramsJsonStr value + * @param pluginParamsTemplate pluginParamsTemplate + * @return return plugin params value + */ + public static List> generatePluginParams(String paramsJsonStr, String pluginParamsTemplate) { + Map paramsMap = JSONUtils.toMap(paramsJsonStr); + return generatePluginParams(paramsMap, pluginParamsTemplate); + } + + /** + * generate Plugin Params + * + * @param paramsMap paramsMap + * @param pluginParamsTemplate pluginParamsTemplate + * @return return plugin params value + */ + public static List> generatePluginParams(Map paramsMap, String pluginParamsTemplate) { + if (paramsMap == null || paramsMap.isEmpty()) { + return null; + } + List> pluginParamsList = JSONUtils.parseObject(pluginParamsTemplate, new TypeReference>>() {}); + pluginParamsList.forEach(pluginParams -> pluginParams.put(STRING_PLUGIN_PARAM_VALUE, paramsMap.get(pluginParams.get(STRING_PLUGIN_PARAM_FIELD)))); + return pluginParamsList; + } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/RadioParam.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/RadioParam.java index 824c7fe6f3..2822dfd3df 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/RadioParam.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/RadioParam.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.spi.params; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_OPTIONS; + import org.apache.dolphinscheduler.spi.params.base.FormType; import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; import org.apache.dolphinscheduler.spi.params.base.PluginParams; @@ -32,7 +34,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; */ public class RadioParam extends PluginParams { - @JsonProperty("options") + 
@JsonProperty(STRING_PLUGIN_PARAM_OPTIONS) private List paramsOptionsList; private RadioParam(Builder builder) { diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/PluginParams.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/PluginParams.java index 34d60a26c4..3815528abe 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/PluginParams.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/PluginParams.java @@ -17,6 +17,14 @@ package org.apache.dolphinscheduler.spi.params.base; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_FIELD; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_NAME; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_PROPS; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_TITLE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_TYPE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_VALIDATE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_VALUE; + import static java.util.Objects.requireNonNull; import java.util.List; @@ -35,34 +43,34 @@ public class PluginParams { /** * param name */ - @JsonProperty("field") + @JsonProperty(STRING_PLUGIN_PARAM_FIELD) protected String name; /** * param name */ - @JsonProperty("name") + @JsonProperty(STRING_PLUGIN_PARAM_NAME) protected String fieldName; - @JsonProperty("props") + @JsonProperty(STRING_PLUGIN_PARAM_PROPS) protected ParamsProps props; - @JsonProperty("type") + @JsonProperty(STRING_PLUGIN_PARAM_TYPE) protected String formType; /** * Name displayed on the page */ - @JsonProperty("title") + @JsonProperty(STRING_PLUGIN_PARAM_TITLE) protected String title; /** * default value or value input by user in the page */ - 
@JsonProperty("value") + @JsonProperty(STRING_PLUGIN_PARAM_VALUE) protected Object value; - @JsonProperty("validate") + @JsonProperty(STRING_PLUGIN_PARAM_VALIDATE) protected List<Validate> validateList; protected PluginParams(Builder builder) { diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java new file mode 100644 index 0000000000..d653d4e435 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.utils; + +/** + * constants + */ +public class Constants { + private Constants() { + throw new IllegalStateException("Constants class"); + } + + /** alert plugin param field string **/ + public static final String STRING_PLUGIN_PARAM_FIELD = "field"; + /** alert plugin param name string **/ + public static final String STRING_PLUGIN_PARAM_NAME = "name"; + /** alert plugin param props string **/ + public static final String STRING_PLUGIN_PARAM_PROPS = "props"; + /** alert plugin param type string **/ + public static final String STRING_PLUGIN_PARAM_TYPE = "type"; + /** alert plugin param title string **/ + public static final String STRING_PLUGIN_PARAM_TITLE = "title"; + /** alert plugin param value string **/ + public static final String STRING_PLUGIN_PARAM_VALUE = "value"; + /** alert plugin param validate string **/ + public static final String STRING_PLUGIN_PARAM_VALIDATE = "validate"; + /** alert plugin param options string **/ + public static final String STRING_PLUGIN_PARAM_OPTIONS = "options"; + + + /** string true */ + public static final String STRING_TRUE = "true"; + /** string false */ + public static final String STRING_FALSE = "false"; + /** string yes */ + public static final String STRING_YES = "YES"; + /** string no */ + public static final String STRING_NO = "NO"; + +} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/JSONUtils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/JSONUtils.java index 89d6e50ea5..e87e2102ee 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/JSONUtils.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/JSONUtils.java @@ -24,11 +24,13 @@ import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; import 
java.util.TimeZone; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.SerializationFeature; @@ -124,6 +126,38 @@ return Collections.emptyList(); } + /** + * json to map + * + * @param json json + * @return json to map + */ + public static Map<String, String> toMap(String json) { + return parseObject(json, new TypeReference<Map<String, String>>() {}); + } + + /** + * json to object + * + * @param json json string + * @param type type reference + * @param <T> object type + * @return return parse object + */ + public static <T> T parseObject(String json, TypeReference<T> type) { + if (StringUtils.isEmpty(json)) { + return null; + } + + try { + return objectMapper.readValue(json, type); + } catch (Exception e) { + logger.error("json to map exception!", e); + } + + return null; + } + /** * object to json string * diff --git a/dolphinscheduler-ui/pom.xml b/dolphinscheduler-ui/pom.xml index 1263502cc3..fe715648e7 100644 --- a/dolphinscheduler-ui/pom.xml +++ b/dolphinscheduler-ui/pom.xml @@ -20,7 +20,7 @@ <artifactId>dolphinscheduler</artifactId> <groupId>org.apache.dolphinscheduler</groupId> - <version>1.3.4-SNAPSHOT</version> + <version>${revision}</version> <modelVersion>4.0.0</modelVersion> diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue index e148119b98..5caa1f8779 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue @@ -63,25 +63,25 @@ diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningInstance/_source/createWarningInstance.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningInstance/_source/createWarningInstance.vue index b46e477803..07c9faa8ae 
100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningInstance/_source/createWarningInstance.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningInstance/_source/createWarningInstance.vue @@ -129,28 +129,36 @@ }) }, _submit () { - this.$f.rule.forEach(item => { - item.title = item.name - }) - let param = { - instanceName: this.instanceName, - pluginDefineId: this.pluginDefineId, - pluginInstanceParams: JSON.stringify(this.$f.rule) - } - if (this.item) { - param.alertPluginInstanceId = this.item.id - param.pluginDefineId = null - } - this.$refs.popover.spinnerLoading = true - this.store.dispatch(`security/${this.item ? 'updateAlertPluginInstance' : 'createAlertPluginInstance'}`, param).then(res => { - this.$refs.popover.spinnerLoading = false - this.$emit('onUpdate') - this.$message.success(res.msg) - }).catch(e => { - this.$message.error(e.msg || '') - this.$refs.popover.spinnerLoading = false + this.$f.validate((valid) => { + if (valid) { + this.$f.rule.forEach(item => { + item.title = item.name + }) + let param = { + instanceName: this.instanceName, + pluginDefineId: this.pluginDefineId, + pluginInstanceParams: JSON.stringify(this.$f.rule) + } + if (this.item) { + param.alertPluginInstanceId = this.item.id + param.pluginDefineId = null + } + this.$refs.popover.spinnerLoading = true + this.store.dispatch(`security/${this.item ? 
'updateAlertPluginInstance' : 'createAlertPluginInstance'}`, param).then(res => { + this.$refs.popover.spinnerLoading = false + this.$emit('onUpdate') + this.$message.success(res.msg) + }).catch(e => { + this.$message.error(e.msg || '') + this.$refs.popover.spinnerLoading = false + }) + } else { + this.$message.warning(`${i18n.$t('Instance parameter exception')}`) + this.$refs.popover.spinnerLoading = false + } }) }, + close () { this.$emit('close') } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningInstance/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningInstance/_source/list.vue index 5a8cf2ad1b..d3c508bcff 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningInstance/_source/list.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningInstance/_source/list.vue @@ -20,6 +20,7 @@ +