
Merge remote-tracking branch 'upstream/dev' into params-trans

pull/3/MERGE
lenboo · 3 years ago · commit 59811d0efb
Changed files (the number before each path is the count of changed lines in that file):
  1. 11
      dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java
  2. 23
      dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java
  3. 11
      dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java
  4. 4
      dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java
  5. 6
      dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java
  6. 14
      dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java
  7. 2
      dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java
  8. 4
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java
  9. 20
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java
  10. 20
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java
  11. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/BaseController.java
  12. 31
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java
  13. 2
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java
  14. 7
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java
  15. 7
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java
  16. 36
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java
  17. 25
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java
  18. 20
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java
  19. 41
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java
  20. 42
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java
  21. 32
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java
  22. 10
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java
  23. 10
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java
  24. 21
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java
  25. 17
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java
  26. 20
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java
  27. 4
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UiPluginController.java
  28. 48
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java
  29. 46
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java
  30. 30
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java
  31. 1
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java
  32. 140
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java
  33. 82
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java
  34. 2
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java
  35. 544
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java
  36. 509
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java
  37. 3
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java
  38. 168
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java
  39. 3
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java
  40. 610
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java
  41. 1
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java
  42. 229
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java
  43. 1200
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
  44. 509
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java
  45. 5
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java
  46. 154
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java
  47. 48
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java
  48. 261
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java
  49. 2
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UiPluginService.java
  50. 875
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java
  51. 83
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageService.java
  52. 140
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java
  53. 14
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java
  54. 207
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java
  55. 23
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java
  56. 136
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java
  57. 10
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java
  58. 659
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java
  59. 578
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java
  60. 3
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java
  61. 160
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java
  62. 23
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java
  63. 7
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java
  64. 740
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java
  65. 5
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java
  66. 294
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java
  67. 1312
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java
  68. 599
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java
  69. 20
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java
  70. 207
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java
  71. 85
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskRecordServiceImpl.java
  72. 21
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java
  73. 325
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java
  74. 11
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java
  75. 1070
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java
  76. 108
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java
  77. 179
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java
  78. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegexUtils.java
  79. 18
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java
  80. 21
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java
  81. 17
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AlertGroupControllerTest.java
  82. 27
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java
  83. 15
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java
  84. 1
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java
  85. 13
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoggerControllerTest.java
  86. 14
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java
  87. 13
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java
  88. 1
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java
  89. 18
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java
  90. 14
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/QueueControllerTest.java
  91. 15
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java
  92. 15
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java
  93. 16
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java
  94. 15
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java
  95. 19
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java
  96. 14
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageControllerTest.java
  97. 21
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java
  98. 5
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java
  99. 11
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java
  100. 3
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceServiceTest.java
Some files were not shown because too many files have changed in this diff.

dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java (11 changed lines)

@@ -17,6 +17,11 @@
 package org.apache.dolphinscheduler.plugin.alert.dingtalk;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_FALSE;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES;
 import org.apache.dolphinscheduler.spi.alert.AlertChannel;
 import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory;
 import org.apache.dolphinscheduler.spi.params.InputParam;
@@ -52,9 +57,9 @@ public class DingTalkAlertChannelFactory implements AlertChannelFactory {
                 .build();
         RadioParam isEnableProxy =
                 RadioParam.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE)
-                        .addParamsOptions(new ParamsOptions("YES", true, false))
-                        .addParamsOptions(new ParamsOptions("NO", false, false))
-                        .setValue(true)
+                        .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false))
+                        .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false))
+                        .setValue(STRING_TRUE)
                         .addValidate(Validate.newBuilder()
                                 .setRequired(false)
                                 .build())
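The same change repeats in the email and FeiShu factories below: the radio options and the default value switch from Java booleans to the shared string constants, presumably so the stored plugin parameters line up with the string values the web form posts back. A minimal sketch of the resulting builder call, assuming STRING_YES/STRING_NO are the display labels and STRING_TRUE/STRING_FALSE resolve to "true"/"false" (the Constants class itself is not part of the hunks shown here):

    // Sketch only: option values and the default are now strings, not booleans.
    RadioParam isEnableProxy = RadioParam
            .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE,
                    DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE)
            .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) // label "YES", value "true"
            .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) // label "NO", value "false"
            .setValue(STRING_TRUE)                                               // default: the string "true"
            .addValidate(Validate.newBuilder().setRequired(false).build())
            .build();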

dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java (23 changed lines)

@@ -17,6 +17,11 @@
 package org.apache.dolphinscheduler.plugin.alert.email;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_FALSE;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES;
 import org.apache.dolphinscheduler.spi.alert.AlertChannel;
 import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory;
 import org.apache.dolphinscheduler.spi.alert.AlertConstants;
@@ -72,9 +77,9 @@ public class EmailAlertChannelFactory implements AlertChannelFactory {
                 .build();
         RadioParam enableSmtpAuth = RadioParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_AUTH, MailParamsConstants.MAIL_SMTP_AUTH)
-                .addParamsOptions(new ParamsOptions("YES", true, false))
-                .addParamsOptions(new ParamsOptions("NO", false, false))
-                .setValue(true)
+                .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false))
+                .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false))
+                .setValue(STRING_TRUE)
                 .addValidate(Validate.newBuilder().setRequired(true).build())
                 .build();
@@ -87,16 +92,16 @@ public class EmailAlertChannelFactory implements AlertChannelFactory {
                 .build();
         RadioParam enableTls = RadioParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_STARTTLS_ENABLE, MailParamsConstants.MAIL_SMTP_STARTTLS_ENABLE)
-                .addParamsOptions(new ParamsOptions("YES", true, false))
-                .addParamsOptions(new ParamsOptions("NO", false, false))
-                .setValue(false)
+                .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false))
+                .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false))
+                .setValue(STRING_FALSE)
                 .addValidate(Validate.newBuilder().setRequired(true).build())
                 .build();
         RadioParam enableSsl = RadioParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_SSL_ENABLE, MailParamsConstants.MAIL_SMTP_SSL_ENABLE)
-                .addParamsOptions(new ParamsOptions("YES", true, false))
-                .addParamsOptions(new ParamsOptions("NO", false, false))
-                .setValue(false)
+                .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false))
+                .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false))
+                .setValue(STRING_FALSE)
                 .addValidate(Validate.newBuilder().setRequired(true).build())
                 .build();

dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java (11 changed lines)

@@ -17,6 +17,11 @@
 package org.apache.dolphinscheduler.plugin.alert.feishu;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_FALSE;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE;
+import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES;
 import org.apache.dolphinscheduler.spi.alert.AlertChannel;
 import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory;
 import org.apache.dolphinscheduler.spi.params.InputParam;
@@ -44,9 +49,9 @@ public class FeiShuAlertChannelFactory implements AlertChannelFactory {
                 .build();
         RadioParam isEnableProxy =
                 RadioParam.newBuilder(FeiShuParamsConstants.NAME_FEI_SHU_PROXY_ENABLE, FeiShuParamsConstants.NAME_FEI_SHU_PROXY_ENABLE)
-                        .addParamsOptions(new ParamsOptions("YES", true, false))
-                        .addParamsOptions(new ParamsOptions("NO", false, false))
-                        .setValue(true)
+                        .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false))
+                        .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false))
+                        .setValue(STRING_TRUE)
                         .addValidate(Validate.newBuilder()
                                 .setRequired(false)
                                 .build())

dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java (4 changed lines)

@@ -56,8 +56,8 @@ public class ScriptAlertChannelFactory implements AlertChannelFactory {
                 .build();
         RadioParam scriptTypeParams = RadioParam.newBuilder(ScriptParamsConstants.NAME_SCRIPT_TYPE, ScriptParamsConstants.SCRIPT_TYPE)
-                .addParamsOptions(new ParamsOptions(ScriptType.SHELL.getDescp(), ScriptType.SHELL.getCode(), false))
-                .setValue(ScriptType.SHELL.getCode())
+                .addParamsOptions(new ParamsOptions(ScriptType.SHELL.getDescp(), ScriptType.SHELL.getDescp(), false))
+                .setValue(ScriptType.SHELL.getDescp())
                 .addValidate(Validate.newBuilder().setRequired(true).build())
                 .build();

dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java (6 changed lines)

@@ -33,19 +33,19 @@ public class ScriptSender {
     private String scriptPath;
-    private Integer scriptType;
+    private String scriptType;
     private String userParams;
     ScriptSender(Map<String, String> config) {
         scriptPath = config.get(ScriptParamsConstants.NAME_SCRIPT_PATH);
-        scriptType = Integer.parseInt(config.get(ScriptParamsConstants.NAME_SCRIPT_TYPE));
+        scriptType = config.get(ScriptParamsConstants.NAME_SCRIPT_TYPE);
         userParams = config.get(ScriptParamsConstants.NAME_SCRIPT_USER_PARAMS);
     }
     AlertResult sendScriptAlert(String msg) {
         AlertResult alertResult = new AlertResult();
-        if (ScriptType.of(scriptType).equals(ScriptType.SHELL)) {
+        if (ScriptType.SHELL.getDescp().equals(scriptType)) {
             return executeShellScript(msg);
         }
         return alertResult;

dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java (14 changed lines)

@@ -45,18 +45,4 @@ public enum ScriptType {
         return descp;
     }
-    private static final Map<Integer, ScriptType> SCRIPT_TYPE_MAP = new HashMap<>();
-    static {
-        for (ScriptType scriptType : ScriptType.values()) {
-            SCRIPT_TYPE_MAP.put(scriptType.code, scriptType);
-        }
-    }
-    public static ScriptType of(Integer code) {
-        if (SCRIPT_TYPE_MAP.containsKey(code)) {
-            return SCRIPT_TYPE_MAP.get(code);
-        }
-        throw new IllegalArgumentException("invalid code : " + code);
-    }
 }
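Taken together, the script-plugin hunks above store the script type as the enum's description string instead of its integer code: the factory puts getDescp() into the params, the sender compares that raw string directly, and the code-to-enum lookup (SCRIPT_TYPE_MAP / ScriptType.of) becomes dead code and is deleted. A minimal before/after sketch, assuming a config map like the one ScriptSender receives:

    // Sketch only: how the alert-script plugin resolves its script type after this change.
    Map<String, String> config = new HashMap<>();
    config.put(ScriptParamsConstants.NAME_SCRIPT_TYPE, ScriptType.SHELL.getDescp());

    // Before: the value was the integer code and had to be parsed and mapped back to the enum.
    //   int code = Integer.parseInt(config.get(ScriptParamsConstants.NAME_SCRIPT_TYPE));
    //   boolean isShell = ScriptType.of(code).equals(ScriptType.SHELL);

    // After: the value is the description string and is compared directly.
    String scriptType = config.get(ScriptParamsConstants.NAME_SCRIPT_TYPE);
    boolean isShell = ScriptType.SHELL.getDescp().equals(scriptType);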

dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java (2 changed lines)

@@ -40,7 +40,7 @@ public class ScriptSenderTest {
     @Before
     public void initScriptConfig() {
-        scriptConfig.put(ScriptParamsConstants.NAME_SCRIPT_TYPE, String.valueOf(ScriptType.SHELL.getCode()));
+        scriptConfig.put(ScriptParamsConstants.NAME_SCRIPT_TYPE, String.valueOf(ScriptType.SHELL.getDescp()));
         scriptConfig.put(ScriptParamsConstants.NAME_SCRIPT_USER_PARAMS, "userParams");
         scriptConfig.put(ScriptParamsConstants.NAME_SCRIPT_PATH, shellFilPath);
     }

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java (4 changed lines)

@@ -14,8 +14,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.dolphinscheduler.api.controller;
+package org.apache.dolphinscheduler.api.controller;
 import static org.apache.dolphinscheduler.api.enums.Status.CREATE_ACCESS_TOKEN_ERROR;
 import static org.apache.dolphinscheduler.api.enums.Status.DELETE_ACCESS_TOKEN_ERROR;
@@ -54,7 +54,7 @@ import springfox.documentation.annotations.ApiIgnore;
 /**
  * access token controller
  */
-@Api(tags = "ACCESS_TOKEN_TAG", position = 1)
+@Api(tags = "ACCESS_TOKEN_TAG")
 @RestController
 @RequestMapping("/access-token")
 public class AccessTokenController extends BaseController {

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java (20 changed lines)

@@ -26,13 +26,12 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ALERT_GROUP_ER
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.exceptions.ApiException;
 import org.apache.dolphinscheduler.api.service.AlertGroupService;
+import org.apache.dolphinscheduler.api.utils.RegexUtils;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.utils.ParameterUtils;
-import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.dao.entity.User;
-import java.util.HashMap;
 import java.util.Map;
 import org.slf4j.Logger;
@@ -56,7 +55,7 @@ import springfox.documentation.annotations.ApiIgnore;
 /**
  * alert group controller
  */
-@Api(tags = "ALERT_GROUP_TAG", position = 1)
+@Api(tags = "ALERT_GROUP_TAG")
 @RestController
 @RequestMapping("alert-group")
 public class AlertGroupController extends BaseController {
@@ -88,12 +87,9 @@ public class AlertGroupController extends BaseController {
                               @RequestParam(value = "groupName") String groupName,
                               @RequestParam(value = "description", required = false) String description,
                               @RequestParam(value = "alertInstanceIds") String alertInstanceIds) {
-        String strUserName = StringUtils.replaceNRTtoUnderline(loginUser.getUserName());
-        String strGroupName = StringUtils.replaceNRTtoUnderline(groupName);
-        String strDescription = StringUtils.replaceNRTtoUnderline(description);
-        String strAlertInstanceIds = StringUtils.replaceNRTtoUnderline(alertInstanceIds);
         logger.info("loginUser user {}, create alert group, groupName: {}, desc: {},alertInstanceIds:{}",
-                strUserName, strGroupName, strDescription, strAlertInstanceIds);
+                RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(groupName),
+                RegexUtils.escapeNRT(description), RegexUtils.escapeNRT(alertInstanceIds));
         Map<String, Object> result = alertGroupService.createAlertgroup(loginUser, groupName, description, alertInstanceIds);
         return returnDataList(result);
     }
@@ -111,7 +107,7 @@ public class AlertGroupController extends BaseController {
     public Result list(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
         logger.info("login user {}, query all alertGroup",
                 loginUser.getUserName());
-        HashMap<String, Object> result = alertGroupService.queryAlertgroup();
+        Map<String, Object> result = alertGroupService.queryAlertgroup();
         return returnDataList(result);
     }
@@ -174,9 +170,9 @@ public class AlertGroupController extends BaseController {
                                   @RequestParam(value = "description", required = false) String description,
                                   @RequestParam(value = "alertInstanceIds") String alertInstanceIds) {
         logger.info("login user {}, updateProcessInstance alert group, groupName: {}, desc: {}",
-                StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
-                StringUtils.replaceNRTtoUnderline(groupName),
-                StringUtils.replaceNRTtoUnderline(description));
+                RegexUtils.escapeNRT(loginUser.getUserName()),
+                RegexUtils.escapeNRT(groupName),
+                RegexUtils.escapeNRT(description));
         Map<String, Object> result = alertGroupService.updateAlertgroup(loginUser, id, groupName, description, alertInstanceIds);
         return returnDataList(result);
     }
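The controllers below follow the same pattern: every place that logged user-supplied values through StringUtils.replaceNRTtoUnderline now goes through RegexUtils.escapeNRT, a helper added by this change set (entry 78 in the file list above) whose body is not shown in this diff. A hypothetical sketch of what such a helper might look like, assuming it neutralises newline, carriage-return and tab characters so request parameters cannot forge extra log lines:

    // Hypothetical sketch; the real RegexUtils added by this PR is not shown in this diff.
    public class RegexUtils {

        private RegexUtils() {
        }

        /**
         * Replace newline, carriage-return and tab characters with '_'
         * so user-controlled values cannot inject fake log entries.
         */
        public static String escapeNRT(String str) {
            if (str != null && !str.isEmpty()) {
                return str.replaceAll("[\n\r\t]", "_");
            }
            return null;
        }
    }

With a helper of that shape, the log statements in these controllers reduce to calls like logger.info("login user {}", RegexUtils.escapeNRT(loginUser.getUserName())).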

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java (20 changed lines)

@@ -27,9 +27,9 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ALERT_PLUGIN_I
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.exceptions.ApiException;
 import org.apache.dolphinscheduler.api.service.AlertPluginInstanceService;
+import org.apache.dolphinscheduler.api.utils.RegexUtils;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
-import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.dao.entity.User;
 import java.util.Map;
@@ -55,7 +55,7 @@ import springfox.documentation.annotations.ApiIgnore;
 /**
  * alert plugin instance controller
  */
-@Api(tags = "ALERT_PLUGIN_INSTANCE_TAG", position = 1)
+@Api(tags = "ALERT_PLUGIN_INSTANCE_TAG")
 @RestController
 @RequestMapping("alert-plugin-instance")
 public class AlertPluginInstanceController extends BaseController {
@@ -89,8 +89,8 @@ public class AlertPluginInstanceController extends BaseController {
                                             @RequestParam(value = "instanceName") String instanceName,
                                             @RequestParam(value = "pluginInstanceParams") String pluginInstanceParams) {
         logger.info("login user {},create alert plugin instance, instanceName:{} ",
-                StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
-                StringUtils.replaceNRTtoUnderline(instanceName));
+                RegexUtils.escapeNRT(loginUser.getUserName()),
+                RegexUtils.escapeNRT(instanceName));
         Map<String, Object> result = alertPluginInstanceService.create(loginUser, pluginDefineId, instanceName, pluginInstanceParams);
         return returnDataList(result);
     }
@@ -117,7 +117,7 @@ public class AlertPluginInstanceController extends BaseController {
                                             @RequestParam(value = "alertPluginInstanceId") int alertPluginInstanceId,
                                             @RequestParam(value = "instanceName") String instanceName,
                                             @RequestParam(value = "pluginInstanceParams") String pluginInstanceParams) {
-        logger.info("login user {},update alert plugin instance id {}", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), alertPluginInstanceId);
+        logger.info("login user {},update alert plugin instance id {}", RegexUtils.escapeNRT(loginUser.getUserName()), alertPluginInstanceId);
         Map<String, Object> result = alertPluginInstanceService.update(loginUser, alertPluginInstanceId, instanceName, pluginInstanceParams);
         return returnDataList(result);
     }
@@ -138,7 +138,7 @@ public class AlertPluginInstanceController extends BaseController {
     @ApiException(DELETE_ALERT_PLUGIN_INSTANCE_ERROR)
     public Result deleteAlertPluginInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                             @RequestParam(value = "id") int id) {
-        logger.info("login user {},delete alert plugin instance id {}", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), id);
+        logger.info("login user {},delete alert plugin instance id {}", RegexUtils.escapeNRT(loginUser.getUserName()), id);
         Map<String, Object> result = alertPluginInstanceService.delete(loginUser, id);
         return returnDataList(result);
@@ -157,7 +157,7 @@ public class AlertPluginInstanceController extends BaseController {
     @ApiException(GET_ALERT_PLUGIN_INSTANCE_ERROR)
     public Result getAlertPluginInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                          @RequestParam(value = "id") int id) {
-        logger.info("login user {},get alert plugin instance, id {}", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), id);
+        logger.info("login user {},get alert plugin instance, id {}", RegexUtils.escapeNRT(loginUser.getUserName()), id);
         Map<String, Object> result = alertPluginInstanceService.get(loginUser, id);
         return returnDataList(result);
     }
@@ -173,7 +173,7 @@ public class AlertPluginInstanceController extends BaseController {
     @ResponseStatus(HttpStatus.OK)
     @ApiException(QUERY_ALL_ALERT_PLUGIN_INSTANCE_ERROR)
     public Result getAlertPluginInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
-        logger.info("login user {}, query all alert plugin instance", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()));
+        logger.info("login user {}, query all alert plugin instance", RegexUtils.escapeNRT(loginUser.getUserName()));
         Map<String, Object> result = alertPluginInstanceService.queryAll();
         return returnDataList(result);
     }
@@ -193,7 +193,7 @@ public class AlertPluginInstanceController extends BaseController {
     @ResponseStatus(HttpStatus.OK)
     public Result verifyGroupName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                   @RequestParam(value = "alertInstanceName") String alertInstanceName) {
-        logger.info("login user {},verify alert instance name: {}", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), StringUtils.replaceNRTtoUnderline(alertInstanceName));
+        logger.info("login user {},verify alert instance name: {}", RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(alertInstanceName));
         boolean exist = alertPluginInstanceService.checkExistPluginInstanceName(alertInstanceName);
         Result result = new Result();
@@ -227,7 +227,7 @@ public class AlertPluginInstanceController extends BaseController {
     public Result listPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                              @RequestParam("pageNo") Integer pageNo,
                              @RequestParam("pageSize") Integer pageSize) {
-        logger.info("login user {}, list paging, pageNo: {}, pageSize: {}",StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), pageNo, pageSize);
+        logger.info("login user {}, list paging, pageNo: {}, pageSize: {}", RegexUtils.escapeNRT(loginUser.getUserName()), pageNo, pageSize);
         Map<String, Object> result = checkPageParams(pageNo, pageSize);
         if (result.get(Constants.STATUS) != Status.SUCCESS) {
             return returnDataListPaging(result);

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/BaseController.java (9 changed lines)

@@ -14,8 +14,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.dolphinscheduler.api.controller;
+import static org.apache.dolphinscheduler.common.Constants.COMMA;
+import static org.apache.dolphinscheduler.common.Constants.HTTP_HEADER_UNKNOWN;
+import static org.apache.dolphinscheduler.common.Constants.HTTP_X_FORWARDED_FOR;
+import static org.apache.dolphinscheduler.common.Constants.HTTP_X_REAL_IP;
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.utils.PageInfo;
 import org.apache.dolphinscheduler.api.utils.Result;
@@ -23,12 +29,11 @@ import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.dao.entity.Resource;
-import javax.servlet.http.HttpServletRequest;
 import java.text.MessageFormat;
 import java.util.HashMap;
 import java.util.Map;
-import static org.apache.dolphinscheduler.common.Constants.*;
+import javax.servlet.http.HttpServletRequest;
 /**
  * base controller

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java (31 changed lines)

@@ -14,33 +14,44 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.dolphinscheduler.api.controller;
+import static org.apache.dolphinscheduler.api.enums.Status.COMMAND_STATE_COUNT_ERROR;
+import static org.apache.dolphinscheduler.api.enums.Status.COUNT_PROCESS_DEFINITION_USER_ERROR;
+import static org.apache.dolphinscheduler.api.enums.Status.COUNT_PROCESS_INSTANCE_STATE_ERROR;
+import static org.apache.dolphinscheduler.api.enums.Status.QUEUE_COUNT_ERROR;
+import static org.apache.dolphinscheduler.api.enums.Status.TASK_INSTANCE_STATE_COUNT_ERROR;
 import org.apache.dolphinscheduler.api.exceptions.ApiException;
 import org.apache.dolphinscheduler.api.service.DataAnalysisService;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.dao.entity.User;
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiImplicitParam;
-import io.swagger.annotations.ApiImplicitParams;
-import io.swagger.annotations.ApiOperation;
+import java.util.Map;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.http.HttpStatus;
-import org.springframework.web.bind.annotation.*;
-import springfox.documentation.annotations.ApiIgnore;
-import java.util.Map;
-import static org.apache.dolphinscheduler.api.enums.Status.*;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestAttribute;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+import org.springframework.web.bind.annotation.ResponseStatus;
+import org.springframework.web.bind.annotation.RestController;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import springfox.documentation.annotations.ApiIgnore;
 /**
  * data analysis controller
  */
-@Api(tags = "DATA_ANALYSIS_TAG", position = 1)
+@Api(tags = "DATA_ANALYSIS_TAG")
 @RestController
 @RequestMapping("projects/analysis")
 public class DataAnalysisController extends BaseController {

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java (2 changed lines)

@@ -62,7 +62,7 @@ import springfox.documentation.annotations.ApiIgnore;
 /**
  * data source controller
  */
-@Api(tags = "DATA_SOURCE_TAG", position = 3)
+@Api(tags = "DATA_SOURCE_TAG")
 @RestController
 @RequestMapping("datasources")
 public class DataSourceController extends BaseController {

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java (7 changed lines)

@@ -35,7 +35,6 @@ import org.apache.dolphinscheduler.common.enums.WarningType;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.dao.entity.User;
-import java.text.ParseException;
 import java.util.Map;
 import org.slf4j.Logger;
@@ -58,9 +57,9 @@ import io.swagger.annotations.ApiParam;
 import springfox.documentation.annotations.ApiIgnore;
 /**
- * execute process controller
+ * executor controller
  */
-@Api(tags = "PROCESS_INSTANCE_EXECUTOR_TAG", position = 1)
+@Api(tags = "EXECUTOR_TAG")
 @RestController
 @RequestMapping("projects/{projectName}/executors")
 public class ExecutorController extends BaseController {
@@ -121,7 +120,7 @@ public class ExecutorController extends BaseController {
                                @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority,
                                @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
                                @RequestParam(value = "timeout", required = false) Integer timeout,
-                               @RequestParam(value = "startParams", required = false) String startParams) throws ParseException {
+                               @RequestParam(value = "startParams", required = false) String startParams) {
         logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, "
             + "failure policy: {}, node name: {}, node dep: {}, notify type: {}, "
             + "notify group id: {}, run mode: {},process instance priority:{}, workerGroup: {}, timeout: {}, startParams: {} ",

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java (7 changed lines)

@@ -14,8 +14,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.dolphinscheduler.api.controller;
+package org.apache.dolphinscheduler.api.controller;
 import static org.apache.dolphinscheduler.api.enums.Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR;
 import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_INSTANCE_LOG_ERROR;
@@ -46,11 +46,10 @@ import io.swagger.annotations.ApiImplicitParams;
 import io.swagger.annotations.ApiOperation;
 import springfox.documentation.annotations.ApiIgnore;
 /**
- * log controller
+ * logger controller
  */
-@Api(tags = "LOGGER_TAG", position = 13)
+@Api(tags = "LOGGER_TAG")
 @RestController
 @RequestMapping("/log")
 public class LoggerController extends BaseController {

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java (36 changed lines)

@@ -14,8 +14,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.dolphinscheduler.api.controller;
+import static org.apache.dolphinscheduler.api.enums.Status.IP_IS_EMPTY;
+import static org.apache.dolphinscheduler.api.enums.Status.SIGN_OUT_ERROR;
+import static org.apache.dolphinscheduler.api.enums.Status.USER_LOGIN_FAILURE;
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.exceptions.ApiException;
@@ -25,28 +29,34 @@ import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.dao.entity.User;
-import io.swagger.annotations.*;
 import org.apache.commons.httpclient.HttpStatus;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.*;
-import springfox.documentation.annotations.ApiIgnore;
+import java.util.Map;
 import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import java.util.Map;
-import static org.apache.dolphinscheduler.api.enums.Status.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.RequestAttribute;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+import org.springframework.web.bind.annotation.RestController;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import springfox.documentation.annotations.ApiIgnore;
 /**
- * user login controller
- * <p>
- * swagger bootstrap ui docs refer : https://doc.xiaominfo.com/guide/enh-func.html
+ * login controller
 */
-@Api(tags = "LOGIN_TAG", position = 1)
+@Api(tags = "LOGIN_TAG")
 @RestController
 @RequestMapping("")
 public class LoginController extends BaseController {

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java (25 changed lines)

@@ -14,31 +14,40 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.dolphinscheduler.api.controller;
+import static org.apache.dolphinscheduler.api.enums.Status.LIST_MASTERS_ERROR;
+import static org.apache.dolphinscheduler.api.enums.Status.LIST_WORKERS_ERROR;
+import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATABASE_STATE_ERROR;
+import static org.apache.dolphinscheduler.api.enums.Status.QUERY_ZOOKEEPER_STATE_ERROR;
 import org.apache.dolphinscheduler.api.exceptions.ApiException;
 import org.apache.dolphinscheduler.api.service.MonitorService;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.dao.entity.User;
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiOperation;
+import java.util.Map;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.http.HttpStatus;
-import org.springframework.web.bind.annotation.*;
-import springfox.documentation.annotations.ApiIgnore;
-import java.util.Map;
-import static org.apache.dolphinscheduler.api.enums.Status.*;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestAttribute;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.ResponseStatus;
+import org.springframework.web.bind.annotation.RestController;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import springfox.documentation.annotations.ApiIgnore;
 /**
  * monitor controller
  */
-@Api(tags = "MONITOR_TAG", position = 1)
+@Api(tags = "MONITOR_TAG")
 @RestController
 @RequestMapping("/monitor")
 public class MonitorController extends BaseController {

20
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java

@ -38,6 +38,7 @@ import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService; import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.utils.RegexUtils;
import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.ReleaseState;
@ -76,11 +77,10 @@ import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam; import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore; import springfox.documentation.annotations.ApiIgnore;
/** /**
* process definition controller * process definition controller
*/ */
@Api(tags = "PROCESS_DEFINITION_TAG", position = 2) @Api(tags = "PROCESS_DEFINITION_TAG")
@RestController @RestController
@RequestMapping("projects/{projectName}/process") @RequestMapping("projects/{projectName}/process")
public class ProcessDefinitionController extends BaseController { public class ProcessDefinitionController extends BaseController {
@ -154,10 +154,10 @@ public class ProcessDefinitionController extends BaseController {
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds, @RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
@RequestParam(value = "targetProjectId", required = true) int targetProjectId) { @RequestParam(value = "targetProjectId", required = true) int targetProjectId) {
logger.info("batch copy process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}", logger.info("batch copy process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), RegexUtils.escapeNRT(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(projectName), RegexUtils.escapeNRT(projectName),
StringUtils.replaceNRTtoUnderline(processDefinitionIds), RegexUtils.escapeNRT(processDefinitionIds),
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId))); RegexUtils.escapeNRT(String.valueOf(targetProjectId)));
return returnDataList( return returnDataList(
processDefinitionService.batchCopyProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId)); processDefinitionService.batchCopyProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId));
@ -185,10 +185,10 @@ public class ProcessDefinitionController extends BaseController {
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds, @RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
@RequestParam(value = "targetProjectId", required = true) int targetProjectId) { @RequestParam(value = "targetProjectId", required = true) int targetProjectId) {
logger.info("batch move process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}", logger.info("batch move process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), RegexUtils.escapeNRT(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(projectName), RegexUtils.escapeNRT(projectName),
StringUtils.replaceNRTtoUnderline(processDefinitionIds), RegexUtils.escapeNRT(processDefinitionIds),
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId))); RegexUtils.escapeNRT(String.valueOf(targetProjectId)));
return returnDataList( return returnDataList(
processDefinitionService.batchMoveProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId)); processDefinitionService.batchMoveProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId));
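Throughout this controller the log statements switch from StringUtils.replaceNRTtoUnderline to RegexUtils.escapeNRT (added as an import at the top of this hunk) before user-supplied values such as the user name, project name and id list reach the logger. The helper's implementation is not part of this hunk, so the following is only a minimal sketch of the idea, assuming its purpose is to neutralise newline, carriage-return and tab characters so a request parameter cannot forge extra log lines:

// Sketch only: the real org.apache.dolphinscheduler.api.utils.RegexUtils is not shown in this diff.
public final class EscapeNRTSketch {

    private EscapeNRTSketch() {
    }

    // Replace \n, \r and \t so a user-controlled value cannot break a log line apart.
    public static String escapeNRT(String input) {
        if (input == null) {
            return null;
        }
        return input.replaceAll("[\n\r\t]", "_");
    }

    public static void main(String[] args) {
        System.out.println(escapeNRT("evil\nadmin"));  // prints: evil_admin
    }
}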

41
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java

@@ -14,8 +14,20 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.api.controller; package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROCESS_INSTANCE_ERROR;
import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService; import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
@@ -27,13 +39,6 @@ import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.io.IOException; import java.io.IOException;
import java.text.ParseException; import java.text.ParseException;
@@ -42,12 +47,30 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore;
/** /**
* process instance controller * process instance controller
*/ */
@Api(tags = "PROCESS_INSTANCE_TAG", position = 10) @Api(tags = "PROCESS_INSTANCE_TAG")
@RestController @RestController
@RequestMapping("projects/{projectName}/instance") @RequestMapping("projects/{projectName}/instance")
public class ProcessInstanceController extends BaseController { public class ProcessInstanceController extends BaseController {

42
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java

@@ -14,37 +14,53 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.api.controller; package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROJECT_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROJECT_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.IMPORT_PROCESS_DEFINE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_AUTHORIZED_PROJECT;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROJECT_DETAILS_BY_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_UNAUTHORIZED_PROJECT_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROJECT_ERROR;
import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.RegexUtils;
import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam; import java.util.Map;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus; import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile; import org.springframework.web.multipart.MultipartFile;
import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*; import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/** /**
* project controller * project controller
*/ */
@Api(tags = "PROJECT_TAG", position = 1) @Api(tags = "PROJECT_TAG")
@RestController @RestController
@RequestMapping("projects") @RequestMapping("projects")
public class ProjectController extends BaseController { public class ProjectController extends BaseController {
@@ -239,8 +255,8 @@ public class ProjectController extends BaseController {
@ApiException(QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR) @ApiException(QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR)
public Result queryProjectCreatedAndAuthorizedByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { public Result queryProjectCreatedAndAuthorizedByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user {}, query authorized and user created project by user id: {}.", logger.info("login user {}, query authorized and user created project by user id: {}.",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), RegexUtils.escapeNRT(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(String.valueOf(loginUser.getId()))); RegexUtils.escapeNRT(String.valueOf(loginUser.getId())));
Map<String, Object> result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser); Map<String, Object> result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser);
return returnDataList(result); return returnDataList(result);
} }

32
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java

@@ -14,8 +14,13 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.api.controller; package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_QUEUE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_QUEUE_LIST_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_QUEUE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_QUEUE_ERROR;
import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.exceptions.ApiException;
@@ -24,26 +29,31 @@ import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam; import java.util.Map;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus; import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.GetMapping;
import springfox.documentation.annotations.ApiIgnore; import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import java.util.Map; import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import static org.apache.dolphinscheduler.api.enums.Status.*; import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/** /**
* queue controller * queue controller
*/ */
@Api(tags = "QUEUE_TAG", position = 1) @Api(tags = "QUEUE_TAG")
@RestController @RestController
@RequestMapping("/queue") @RequestMapping("/queue")
public class QueueController extends BaseController { public class QueueController extends BaseController {

10
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java

@@ -82,7 +82,7 @@ import springfox.documentation.annotations.ApiIgnore;
/** /**
* resources controller * resources controller
*/ */
@Api(tags = "RESOURCES_TAG", position = 1) @Api(tags = "RESOURCES_TAG")
@RestController @RestController
@RequestMapping("resources") @RequestMapping("resources")
public class ResourcesController extends BaseController { public class ResourcesController extends BaseController {
@@ -322,9 +322,7 @@ public class ResourcesController extends BaseController {
@RequestParam(value = "programType",required = false) ProgramType programType @RequestParam(value = "programType",required = false) ProgramType programType
) { ) {
String programTypeName = programType == null ? "" : programType.name(); String programTypeName = programType == null ? "" : programType.name();
String userName = loginUser.getUserName(); logger.info("query resource list, resource type:{}, program type:{}", type, programTypeName);
userName = userName.replaceAll("[\n|\r|\t]", "_");
logger.info("query resource list, login user:{}, resource type:{}, program type:{}", userName,programTypeName);
Map<String, Object> result = resourceService.queryResourceByProgramType(loginUser, type,programType); Map<String, Object> result = resourceService.queryResourceByProgramType(loginUser, type,programType);
return returnDataList(result); return returnDataList(result);
} }
@@ -641,9 +639,7 @@ public class ResourcesController extends BaseController {
@ApiException(QUERY_DATASOURCE_BY_TYPE_ERROR) @ApiException(QUERY_DATASOURCE_BY_TYPE_ERROR)
public Result<Object> queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, public Result<Object> queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("type") UdfType type) { @RequestParam("type") UdfType type) {
String userName = loginUser.getUserName(); logger.info("query udf func list, type:{}", type);
userName = userName.replaceAll("[\n|\r|\t]", "_");
logger.info("query udf func list, user:{}, type:{}", userName, type);
Map<String, Object> result = udfFuncService.queryUdfFuncList(loginUser, type.ordinal()); Map<String, Object> result = udfFuncService.queryUdfFuncList(loginUser, type.ordinal());
return returnDataList(result); return returnDataList(result);
} }

10
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java

@@ -29,13 +29,13 @@ import static org.apache.dolphinscheduler.common.Constants.SESSION_USER;
import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.RegexUtils;
import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map; import java.util.Map;
@@ -61,9 +61,9 @@ import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore; import springfox.documentation.annotations.ApiIgnore;
/** /**
* schedule controller * scheduler controller
*/ */
@Api(tags = "SCHEDULER_TAG", position = 13) @Api(tags = "SCHEDULER_TAG")
@RestController @RestController
@RequestMapping("/projects/{projectName}/schedule") @RequestMapping("/projects/{projectName}/schedule")
public class SchedulerController extends BaseController { public class SchedulerController extends BaseController {
@@ -116,7 +116,7 @@ public class SchedulerController extends BaseController {
@RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) { @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) {
logger.info("login user {},project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," logger.info("login user {},project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {},"
+ "failure policy: {},processInstancePriority : {}, workGroupId:{}", + "failure policy: {},processInstancePriority : {}, workGroupId:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), StringUtils.replaceNRTtoUnderline(projectName), processDefinitionId, schedule, warningType, warningGroupId, RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(projectName), processDefinitionId, schedule, warningType, warningGroupId,
failureStrategy, processInstancePriority, workerGroup); failureStrategy, processInstancePriority, workerGroup);
Map<String, Object> result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule, Map<String, Object> result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule,
warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup);
@@ -161,7 +161,7 @@ public class SchedulerController extends BaseController {
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) {
logger.info("login user {},project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " logger.info("login user {},project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, "
+ "failure policy: {},processInstancePriority : {},workerGroupId:{}", + "failure policy: {},processInstancePriority : {},workerGroupId:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), StringUtils.replaceNRTtoUnderline(projectName), id, schedule, warningType, warningGroupId, failureStrategy, RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(projectName), id, schedule, warningType, warningGroupId, failureStrategy,
processInstancePriority, workerGroup); processInstancePriority, workerGroup);
Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectName, id, schedule, Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectName, id, schedule,

21
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java

@@ -22,11 +22,11 @@ import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_LIST_PAGIN
import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.TaskInstanceService; import org.apache.dolphinscheduler.api.service.TaskInstanceService;
import org.apache.dolphinscheduler.api.utils.RegexUtils;
import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map; import java.util.Map;
@@ -54,7 +54,7 @@ import springfox.documentation.annotations.ApiIgnore;
/** /**
* task instance controller * task instance controller
*/ */
@Api(tags = "TASK_INSTANCE_TAG", position = 11) @Api(tags = "TASK_INSTANCE_TAG")
@RestController @RestController
@RequestMapping("/projects/{projectName}/task-instance") @RequestMapping("/projects/{projectName}/task-instance")
public class TaskInstanceController extends BaseController { public class TaskInstanceController extends BaseController {
@@ -64,7 +64,6 @@ public class TaskInstanceController extends BaseController {
@Autowired @Autowired
TaskInstanceService taskInstanceService; TaskInstanceService taskInstanceService;
/** /**
* query task list paging * query task list paging
* *
@@ -113,16 +112,16 @@ public class TaskInstanceController extends BaseController {
@RequestParam("pageSize") Integer pageSize) { @RequestParam("pageSize") Integer pageSize) {
logger.info("query task instance list, projectName:{}, processInstanceId:{}, processInstanceName:{}, search value:{}, taskName:{}, executorName: {}, stateType:{}, host:{}, start:{}, end:{}", logger.info("query task instance list, projectName:{}, processInstanceId:{}, processInstanceName:{}, search value:{}, taskName:{}, executorName: {}, stateType:{}, host:{}, start:{}, end:{}",
StringUtils.replaceNRTtoUnderline(projectName), RegexUtils.escapeNRT(projectName),
processInstanceId, processInstanceId,
StringUtils.replaceNRTtoUnderline(processInstanceName), RegexUtils.escapeNRT(processInstanceName),
StringUtils.replaceNRTtoUnderline(searchVal), RegexUtils.escapeNRT(searchVal),
StringUtils.replaceNRTtoUnderline(taskName), RegexUtils.escapeNRT(taskName),
StringUtils.replaceNRTtoUnderline(executorName), RegexUtils.escapeNRT(executorName),
stateType, stateType,
StringUtils.replaceNRTtoUnderline(host), RegexUtils.escapeNRT(host),
StringUtils.replaceNRTtoUnderline(startTime), RegexUtils.escapeNRT(startTime),
StringUtils.replaceNRTtoUnderline(endTime)); RegexUtils.escapeNRT(endTime));
searchVal = ParameterUtils.handleEscapes(searchVal); searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = taskInstanceService.queryTaskListPaging( Map<String, Object> result = taskInstanceService.queryTaskListPaging(
loginUser, projectName, processInstanceId, processInstanceName, taskName, executorName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); loginUser, projectName, processInstanceId, processInstanceName, taskName, executorName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize);

17
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java

@@ -14,8 +14,10 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.api.controller; package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR;
import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.TaskRecordService; import org.apache.dolphinscheduler.api.service.TaskRecordService;
@@ -23,20 +25,23 @@ import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus; import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import springfox.documentation.annotations.ApiIgnore; import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/** /**
* data quality controller * task record controller
*/ */
@ApiIgnore @ApiIgnore
@RestController @RestController

20
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java

@@ -27,9 +27,9 @@ import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_OS_TENANT_CODE
import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.TenantService; import org.apache.dolphinscheduler.api.service.TenantService;
import org.apache.dolphinscheduler.api.utils.RegexUtils;
import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
@@ -53,18 +53,16 @@ import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore; import springfox.documentation.annotations.ApiIgnore;
/** /**
* tenant controller * tenant controller
*/ */
@Api(tags = "TENANT_TAG", position = 1) @Api(tags = "TENANT_TAG")
@RestController @RestController
@RequestMapping("/tenant") @RequestMapping("/tenant")
public class TenantController extends BaseController { public class TenantController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(TenantController.class); private static final Logger logger = LoggerFactory.getLogger(TenantController.class);
@Autowired @Autowired
private TenantService tenantService; private TenantService tenantService;
@@ -91,15 +89,13 @@ public class TenantController extends BaseController {
@RequestParam(value = "tenantCode") String tenantCode, @RequestParam(value = "tenantCode") String tenantCode,
@RequestParam(value = "queueId") int queueId, @RequestParam(value = "queueId") int queueId,
@RequestParam(value = "description", required = false) String description) throws Exception { @RequestParam(value = "description", required = false) String description) throws Exception {
String userReplace = StringUtils.replaceNRTtoUnderline(loginUser.getUserName()); logger.info("login user {}, create tenant, tenantCode: {}, queueId: {}, desc: {}",
String tenantCodeReplace = StringUtils.replaceNRTtoUnderline(tenantCode); RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(tenantCode),
String descReplace = StringUtils.replaceNRTtoUnderline(description); queueId, RegexUtils.escapeNRT(description));
logger.info("login user {}, create tenant, tenantCode: {}, queueId: {}, desc: {}", userReplace, tenantCodeReplace, queueId, descReplace);
Map<String, Object> result = tenantService.createTenant(loginUser, tenantCode, queueId, description); Map<String, Object> result = tenantService.createTenant(loginUser, tenantCode, queueId, description);
return returnDataList(result); return returnDataList(result);
} }
/** /**
* query tenant list paging * query tenant list paging
* *
@@ -177,9 +173,9 @@ public class TenantController extends BaseController {
@RequestParam(value = "tenantCode") String tenantCode, @RequestParam(value = "tenantCode") String tenantCode,
@RequestParam(value = "queueId") int queueId, @RequestParam(value = "queueId") int queueId,
@RequestParam(value = "description", required = false) String description) throws Exception { @RequestParam(value = "description", required = false) String description) throws Exception {
String userReplace = StringUtils.replaceNRTtoUnderline(loginUser.getUserName()); String userReplace = RegexUtils.escapeNRT(loginUser.getUserName());
String tenantCodeReplace = StringUtils.replaceNRTtoUnderline(tenantCode); String tenantCodeReplace = RegexUtils.escapeNRT(tenantCode);
String descReplace = StringUtils.replaceNRTtoUnderline(description); String descReplace = RegexUtils.escapeNRT(description);
logger.info("login user {}, create tenant, tenantCode: {}, queueId: {}, desc: {}", userReplace, tenantCodeReplace, queueId, descReplace); logger.info("login user {}, create tenant, tenantCode: {}, queueId: {}, desc: {}", userReplace, tenantCodeReplace, queueId, descReplace);
Map<String, Object> result = tenantService.updateTenant(loginUser, id, tenantCode, queueId, description); Map<String, Object> result = tenantService.updateTenant(loginUser, id, tenantCode, queueId, description);
return returnDataList(result); return returnDataList(result);
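createTenant above now feeds RegexUtils.escapeNRT results straight into the logger call, while updateTenant keeps the three local *Replace temporaries; the two styles are equivalent. If this pattern keeps spreading, one option — purely a hypothetical sketch, not something this merge introduces — is a small var-args wrapper that escapes every String argument before delegating to SLF4J:

import java.util.Arrays;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public final class SafeLog {

    private static final Logger LOGGER = LoggerFactory.getLogger(SafeLog.class);

    private SafeLog() {
    }

    // Escape \n, \r and \t in every String argument; pass other types through untouched.
    public static void info(Logger logger, String format, Object... args) {
        Object[] escaped = Arrays.stream(args)
                .map(a -> a instanceof String ? ((String) a).replaceAll("[\n\r\t]", "_") : a)
                .toArray();
        logger.info(format, escaped);
    }

    public static void main(String[] args) {
        info(LOGGER, "login user {}, create tenant, tenantCode: {}", "bad\nguy", "tenantA");
    }
}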

4
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UiPluginController.java

@@ -46,12 +46,12 @@ import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore; import springfox.documentation.annotations.ApiIgnore;
/** /**
* UiPluginController * ui plugin controller
* Some plugins (such as alert plugin) need to provide UI interfaces to users. * Some plugins (such as alert plugin) need to provide UI interfaces to users.
* We use from-creat to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams. * We use from-creat to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams.
* From-create can generate dynamic ui based on this parameter. * From-create can generate dynamic ui based on this parameter.
*/ */
@Api(tags = "UI_PLUGINS", position = 1) @Api(tags = "UI_PLUGINS_TAG")
@RestController @RestController
@RequestMapping("ui-plugins") @RequestMapping("ui-plugins")
public class UiPluginController extends BaseController { public class UiPluginController extends BaseController {

48
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java

@@ -14,8 +14,22 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.api.controller; package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZED_USER_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_USER_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_USER_BY_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GET_USER_INFO_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GRANT_DATASOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GRANT_PROJECT_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GRANT_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GRANT_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_USER_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UNAUTHORIZED_USER_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_USER_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.USER_LIST_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_USERNAME_ERROR;
import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.exceptions.ApiException;
@@ -24,28 +38,34 @@ import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import static org.apache.dolphinscheduler.api.enums.Status.*;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/** /**
* user controller * users controller
*/ */
@Api(tags = "USERS_TAG", position = 14) @Api(tags = "USERS_TAG")
@RestController @RestController
@RequestMapping("/users") @RequestMapping("/users")
public class UsersController extends BaseController { public class UsersController extends BaseController {

46
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java

@@ -14,9 +14,11 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.api.controller; package org.apache.dolphinscheduler.api.controller;
import io.swagger.annotations.ApiParam; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_WORKFLOW_LINEAGE_ERROR;
import static org.apache.dolphinscheduler.common.Constants.SESSION_USER;
import org.apache.dolphinscheduler.api.service.WorkFlowLineageService; import org.apache.dolphinscheduler.api.service.WorkFlowLineageService;
import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.api.utils.Result;
@@ -24,21 +26,31 @@ import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage; import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_WORKFLOW_LINEAGE_ERROR; import org.slf4j.Logger;
import static org.apache.dolphinscheduler.common.Constants.SESSION_USER; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore;
/**
* work flow lineage controller
*/
@Api(tags = "WORK_FLOW_LINEAGE_TAG")
@RestController @RestController
@RequestMapping("lineages/{projectId}") @RequestMapping("lineages/{projectId}")
public class WorkFlowLineageController extends BaseController { public class WorkFlowLineageController extends BaseController {
@@ -47,7 +59,7 @@ public class WorkFlowLineageController extends BaseController {
@Autowired @Autowired
private WorkFlowLineageService workFlowLineageService; private WorkFlowLineageService workFlowLineageService;
@GetMapping(value="/list-name") @GetMapping(value = "/list-name")
@ResponseStatus(HttpStatus.OK) @ResponseStatus(HttpStatus.OK)
public Result<List<WorkFlowLineage>> queryWorkFlowLineageByName(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, public Result<List<WorkFlowLineage>> queryWorkFlowLineageByName(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectId", value = "PROJECT_ID", required = true, example = "1") @PathVariable int projectId, @ApiParam(name = "projectId", value = "PROJECT_ID", required = true, example = "1") @PathVariable int projectId,
@@ -56,32 +68,30 @@ public class WorkFlowLineageController extends BaseController {
searchVal = ParameterUtils.handleEscapes(searchVal); searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByName(searchVal,projectId); Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByName(searchVal,projectId);
return returnDataList(result); return returnDataList(result);
} catch (Exception e){ } catch (Exception e) {
logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(),e); logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(),e);
return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg()); return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg());
} }
} }
@GetMapping(value="/list-ids") @GetMapping(value = "/list-ids")
@ResponseStatus(HttpStatus.OK) @ResponseStatus(HttpStatus.OK)
public Result<Map<String, Object>> queryWorkFlowLineageByIds(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, public Result<Map<String, Object>> queryWorkFlowLineageByIds(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectId", value = "PROJECT_ID", required = true, example = "1") @PathVariable int projectId, @ApiParam(name = "projectId", value = "PROJECT_ID", required = true, example = "1") @PathVariable int projectId,
@ApiIgnore @RequestParam(value = "ids", required = false) String ids) { @ApiIgnore @RequestParam(value = "ids", required = false) String ids) {
try { try {
ids = ParameterUtils.handleEscapes(ids); ids = ParameterUtils.handleEscapes(ids);
Set<Integer> idsSet = new HashSet<>(); Set<Integer> idsSet = new HashSet<>();
if(ids != null) { if (ids != null) {
String[] idsStr = ids.split(","); String[] idsStr = ids.split(",");
for (String id : idsStr) for (String id : idsStr) {
{
idsSet.add(Integer.parseInt(id)); idsSet.add(Integer.parseInt(id));
} }
} }
Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByIds(idsSet, projectId); Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByIds(idsSet, projectId);
return returnDataList(result); return returnDataList(result);
} catch (Exception e){ } catch (Exception e) {
logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(),e); logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(),e);
return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg()); return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg());
} }
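Besides the new WORK_FLOW_LINEAGE_TAG @Api annotation and class comment, the churn in this controller is mostly brace and spacing cleanup around the loop that turns the comma-separated ids parameter into a Set<Integer>. The same parsing could be written more compactly with streams; this is only an illustrative alternative under that assumption, not what the controller does after this merge:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;

public class IdsParsingSketch {

    // "1,2,3" -> {1, 2, 3}; null or blank input yields an empty set.
    static Set<Integer> parseIds(String ids) {
        if (ids == null || ids.trim().isEmpty()) {
            return new HashSet<>();
        }
        return Arrays.stream(ids.split(","))
                .map(String::trim)
                .filter(s -> !s.isEmpty())
                .map(Integer::parseInt)
                .collect(Collectors.toSet());
    }

    public static void main(String[] args) {
        System.out.println(parseIds("1,2,3"));   // [1, 2, 3]
        System.out.println(parseIds(null));      // []
    }
}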

30
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java

@@ -14,8 +14,10 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.api.controller; package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_WORKER_GROUP_FAIL;
import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.WorkerGroupService; import org.apache.dolphinscheduler.api.service.WorkerGroupService;
@@ -23,25 +25,30 @@ import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam; import java.util.Map;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus; import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.GetMapping;
import springfox.documentation.annotations.ApiIgnore; import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map; import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import static org.apache.dolphinscheduler.api.enums.Status.*; import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/** /**
* worker group controller * worker group controller
*/ */
@Api(tags = "WORKER_GROUP_TAG", position = 1) @Api(tags = "WORKER_GROUP_TAG")
@RestController @RestController
@RequestMapping("/worker-group") @RequestMapping("/worker-group")
public class WorkerGroupController extends BaseController { public class WorkerGroupController extends BaseController {
@@ -51,9 +58,6 @@ public class WorkerGroupController extends BaseController {
@Autowired @Autowired
WorkerGroupService workerGroupService; WorkerGroupService workerGroupService;
/** /**
* query worker groups paging * query worker groups paging
* *

1
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java

@@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.api.service; package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;

140
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java

@@ -17,55 +17,21 @@
package org.apache.dolphinscheduler.api.service; package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.AlertType;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.AlertGroup;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map; import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/** /**
* alert group service * alert group service
*/ */
@Service public interface AlertGroupService {
public class AlertGroupService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(AlertGroupService.class);
@Autowired
private AlertGroupMapper alertGroupMapper;
/** /**
* query alert group list * query alert group list
* *
* @return alert group list * @return alert group list
*/ */
public HashMap<String, Object> queryAlertgroup() { Map<String, Object> queryAlertgroup();
HashMap<String, Object> result = new HashMap<>();
List<AlertGroup> alertGroups = alertGroupMapper.queryAllGroupList();
result.put(Constants.DATA_LIST, alertGroups);
putMsg(result, Status.SUCCESS);
return result;
}
/** /**
* paging query alarm group list * paging query alarm group list
@@ -76,24 +42,7 @@ public class AlertGroupService extends BaseService {
* @param pageSize page size * @param pageSize page size
* @return alert group list page * @return alert group list page
*/ */
public Map<String, Object> listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { Map<String, Object> listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize);
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
Page<AlertGroup> page = new Page(pageNo, pageSize);
IPage<AlertGroup> alertGroupIPage = alertGroupMapper.queryAlertGroupPage(
page, searchVal);
PageInfo<AlertGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int) alertGroupIPage.getTotal());
pageInfo.setLists(alertGroupIPage.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/** /**
* create alert group * create alert group
@@ -104,33 +53,7 @@ public class AlertGroupService extends BaseService {
* @param alertInstanceIds alertInstanceIds * @param alertInstanceIds alertInstanceIds
* @return create result code * @return create result code
*/ */
public Map<String, Object> createAlertgroup(User loginUser, String groupName, String desc, String alertInstanceIds) { Map<String, Object> createAlertgroup(User loginUser, String groupName, String desc, String alertInstanceIds);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (isNotAdmin(loginUser, result)) {
return result;
}
AlertGroup alertGroup = new AlertGroup();
Date now = new Date();
alertGroup.setGroupName(groupName);
alertGroup.setAlertInstanceIds(alertInstanceIds);
alertGroup.setDescription(desc);
alertGroup.setCreateTime(now);
alertGroup.setUpdateTime(now);
alertGroup.setCreateUserId(loginUser.getId());
// insert
int insert = alertGroupMapper.insert(alertGroup);
if (insert > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_ALERT_GROUP_ERROR);
}
return result;
}
/** /**
* updateProcessInstance alert group * updateProcessInstance alert group
@@ -142,35 +65,7 @@ public class AlertGroupService extends BaseService {
* @param alertInstanceIds alertInstanceIds * @param alertInstanceIds alertInstanceIds
* @return update result code * @return update result code
*/ */
public Map<String, Object> updateAlertgroup(User loginUser, int id, String groupName, String desc, String alertInstanceIds) { Map<String, Object> updateAlertgroup(User loginUser, int id, String groupName, String desc, String alertInstanceIds);
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
AlertGroup alertGroup = alertGroupMapper.selectById(id);
if (alertGroup == null) {
putMsg(result, Status.ALERT_GROUP_NOT_EXIST);
return result;
}
Date now = new Date();
if (StringUtils.isNotEmpty(groupName)) {
alertGroup.setGroupName(groupName);
}
alertGroup.setDescription(desc);
alertGroup.setUpdateTime(now);
alertGroup.setCreateUserId(loginUser.getId());
alertGroup.setAlertInstanceIds(alertInstanceIds);
// updateProcessInstance
alertGroupMapper.updateById(alertGroup);
putMsg(result, Status.SUCCESS);
return result;
}
/** /**
* delete alert group by id * delete alert group by id
@@ -179,25 +74,7 @@ public class AlertGroupService extends BaseService {
* @param id alert group id * @param id alert group id
* @return delete result code * @return delete result code
*/ */
@Transactional(rollbackFor = RuntimeException.class) Map<String, Object> delAlertgroupById(User loginUser, int id);
public Map<String, Object> delAlertgroupById(User loginUser, int id) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
if (isNotAdmin(loginUser, result)) {
return result;
}
//check exist
AlertGroup alertGroup = alertGroupMapper.selectById(id);
if (alertGroup == null) {
putMsg(result, Status.ALERT_GROUP_NOT_EXIST);
return result;
}
alertGroupMapper.deleteById(id);
putMsg(result, Status.SUCCESS);
return result;
}
/** /**
* verify group name exists * verify group name exists
@@ -205,8 +82,5 @@ public class AlertGroupService extends BaseService {
* @param groupName group name * @param groupName group name
* @return check result code * @return check result code
*/ */
public boolean existGroupName(String groupName) { boolean existGroupName(String groupName);
List<AlertGroup> alertGroup = alertGroupMapper.queryByGroupName(groupName);
return CollectionUtils.isNotEmpty(alertGroup);
}
} }
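AlertGroupService is reduced here from a concrete @Service class to a plain interface; the mapper-backed logic removed from this file presumably moves into an implementation class elsewhere in the merge, which is not shown in this hunk. The shape of that split, using stand-in names rather than the real DolphinScheduler types, looks roughly like this:

import java.util.HashMap;
import java.util.Map;

// Stand-in for the service contract: controllers depend only on this.
interface GroupService {
    Map<String, Object> queryGroups();
}

// Stand-in for the implementation: in the real project this would be a Spring
// @Service bean holding the injected mapper and the Status handling.
class GroupServiceImpl implements GroupService {
    @Override
    public Map<String, Object> queryGroups() {
        Map<String, Object> result = new HashMap<>();
        result.put("data", "group list would come from the mapper here");
        result.put("status", "SUCCESS");
        return result;
    }
}

public class InterfaceSplitSketch {
    public static void main(String[] args) {
        GroupService service = new GroupServiceImpl();
        System.out.println(service.queryGroups());
    }
}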

82
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java

@@ -14,26 +14,20 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.api.service;
import java.text.MessageFormat;
import java.util.Map;
import javax.servlet.http.Cookie; package org.apache.dolphinscheduler.api.service;
import javax.servlet.http.HttpServletRequest;
import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.User;
import java.io.IOException;
import java.util.Map;
/** /**
* base service * base service
*/ */
public class BaseService { public interface BaseService {
/** /**
* check admin * check admin
@@ -41,9 +35,7 @@ public class BaseService {
* @param user input user * @param user input user
* @return ture if administrator, otherwise return false * @return ture if administrator, otherwise return false
*/ */
protected boolean isAdmin(User user) { boolean isAdmin(User user);
return user.getUserType() == UserType.ADMIN_USER;
}
/** /**
* isNotAdmin * isNotAdmin
@@ -52,14 +44,7 @@ public class BaseService {
* @param result result code * @param result result code
* @return true if not administrator, otherwise false * @return true if not administrator, otherwise false
*/ */
protected boolean isNotAdmin(User loginUser, Map<String, Object> result) { boolean isNotAdmin(User loginUser, Map<String, Object> result);
//only admin can operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return true;
}
return false;
}
/** /**
* put message to map * put message to map
@@ -68,14 +53,7 @@ public class BaseService {
* @param status status * @param status status
* @param statusParams status message * @param statusParams status message
*/ */
protected void putMsg(Map<String, Object> result, Status status, Object... statusParams) { void putMsg(Map<String, Object> result, Status status, Object... statusParams);
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
/** /**
* put message to result object * put message to result object
@@ -84,16 +62,7 @@ public class BaseService {
* @param status status * @param status status
* @param statusParams status message * @param statusParams status message
*/ */
protected void putMsg(Result result, Status status, Object... statusParams) { void putMsg(Result<Object> result, Status status, Object... statusParams);
result.setCode(status.getCode());
if (statusParams != null && statusParams.length > 0) {
result.setMsg(MessageFormat.format(status.getMsg(), statusParams));
} else {
result.setMsg(status.getMsg());
}
}
/** /**
* check * check
@@ -103,34 +72,21 @@ public class BaseService {
* @param userNoOperationPerm status * @param userNoOperationPerm status
* @return check result * @return check result
*/ */
protected boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm) { boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm);
//only admin can operate
if (bool) {
result.put(Constants.STATUS, userNoOperationPerm);
result.put(Constants.MSG, userNoOperationPerm.getMsg());
return true;
}
return false;
}
/** /**
* create tenant dir if not exists * create tenant dir if not exists
* *
* @param tenantCode tenant code * @param tenantCode tenant code
* @throws Exception if hdfs operation exception * @throws IOException if hdfs operation exception
*/ */
protected void createTenantDirIfNotExists(String tenantCode) throws Exception { void createTenantDirIfNotExists(String tenantCode) throws IOException;
String resourcePath = HadoopUtils.getHdfsResDir(tenantCode);
String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
/**
* init resource path and udf path
*/
HadoopUtils.getInstance().mkdir(resourcePath);
HadoopUtils.getInstance().mkdir(udfsPath);
}
-protected boolean hasPerm(User operateUser, int createUserId) {
-return operateUser.getId() == createUserId || isAdmin(operateUser);
-}
+/**
+ * has perm
+ *
+ * @param operateUser operate user
+ * @param createUserId create user id
+ */
+boolean hasPerm(User operateUser, int createUserId);
}
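Note: after this change BaseService only declares the helper contract; the bodies shown as removed above have to live in a concrete class. Below is a minimal sketch of such an implementation, based on the removed code; the class name BaseServiceImpl and its placement are illustrative assumptions, not code from this merge, and it is declared abstract only because the remaining interface methods are omitted from the sketch.

package org.apache.dolphinscheduler.api.service;

import java.text.MessageFormat;
import java.util.Map;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.User;

// Hypothetical partial implementation mirroring the removed bodies above.
public abstract class BaseServiceImpl implements BaseService {

    @Override
    public boolean isAdmin(User user) {
        return user.getUserType() == UserType.ADMIN_USER;
    }

    @Override
    public boolean isNotAdmin(User loginUser, Map<String, Object> result) {
        // only admin can operate
        if (!isAdmin(loginUser)) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return true;
        }
        return false;
    }

    @Override
    public void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
        // store the status and a formatted message, as the removed implementation did
        result.put(Constants.STATUS, status);
        result.put(Constants.MSG, statusParams != null && statusParams.length > 0
                ? MessageFormat.format(status.getMsg(), statusParams)
                : status.getMsg());
    }

    @Override
    public boolean hasPerm(User operateUser, int createUserId) {
        return operateUser.getId() == createUserId || isAdmin(operateUser);
    }
}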

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java

@@ -14,8 +14,8 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.dao.entity.User;

544
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java

@@ -17,67 +17,17 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.datasource.OracleDataSource;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
- * datasource service
+ * data source service
 */
-@Service
-public class DataSourceService extends BaseService {
+public interface DataSourceService {
private static final Logger logger = LoggerFactory.getLogger(DataSourceService.class);
public static final String NAME = "name";
public static final String NOTE = "note";
public static final String TYPE = "type";
public static final String HOST = "host";
public static final String PORT = "port";
public static final String PRINCIPAL = "principal";
public static final String DATABASE = "database";
public static final String USER_NAME = "userName";
public static final String OTHER = "other";
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private DataSourceUserMapper datasourceUserMapper;
/**
 * create data source
@@ -89,37 +39,7 @@ public class DataSourceService extends BaseService {
 * @param parameter datasource parameters
 * @return create result code
 */
-public Result<Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter) {
+Result<Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter);
Result<Object> result = new Result<>();
// check name can use or not
if (checkName(name)) {
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
Result<Object> isConnection = checkConnection(type, parameter);
if (Status.SUCCESS.getCode() != isConnection.getCode()) {
return result;
}
// build datasource
DataSource dataSource = new DataSource();
Date now = new Date();
dataSource.setName(name.trim());
dataSource.setNote(desc);
dataSource.setUserId(loginUser.getId());
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(type);
dataSource.setConnectionParams(parameter);
dataSource.setCreateTime(now);
dataSource.setUpdateTime(now);
dataSourceMapper.insert(dataSource);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * updateProcessInstance datasource
@@ -132,59 +52,7 @@ public class DataSourceService extends BaseService {
 * @param id data source id
 * @return update result code
 */
-public Result<Object> updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) {
+Result<Object> updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter);
Result<Object> result = new Result<>();
// determine whether the data source exists
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, dataSource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
//check name can use or not
if (!name.trim().equals(dataSource.getName()) && checkName(name)) {
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
//check password,if the password is not updated, set to the old password.
ObjectNode paramObject = JSONUtils.parseObject(parameter);
String password = paramObject.path(Constants.PASSWORD).asText();
if (StringUtils.isBlank(password)) {
String oldConnectionParams = dataSource.getConnectionParams();
ObjectNode oldParams = JSONUtils.parseObject(oldConnectionParams);
paramObject.put(Constants.PASSWORD, oldParams.path(Constants.PASSWORD).asText());
}
// connectionParams json
String connectionParams = paramObject.toString();
Result<Object> isConnection = checkConnection(type, parameter);
if (Status.SUCCESS.getCode() != isConnection.getCode()) {
return result;
}
Date now = new Date();
dataSource.setName(name.trim());
dataSource.setNote(desc);
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(type);
dataSource.setConnectionParams(connectionParams);
dataSource.setUpdateTime(now);
dataSourceMapper.updateById(dataSource);
putMsg(result, Status.SUCCESS);
return result;
}
private boolean checkName(String name) {
List<DataSource> queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim());
return queryDataSource != null && queryDataSource.size() > 0;
}
/**
 * updateProcessInstance datasource
@@ -192,91 +60,7 @@ public class DataSourceService extends BaseService {
 * @param id datasource id
 * @return data source detail
 */
-public Map<String, Object> queryDataSource(int id) {
+Map<String, Object> queryDataSource(int id);
Map<String, Object> result = new HashMap<String, Object>(5);
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
// type
String dataSourceType = dataSource.getType().toString();
// name
String dataSourceName = dataSource.getName();
// desc
String desc = dataSource.getNote();
// parameter
String parameter = dataSource.getConnectionParams();
BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter);
DbConnectType connectType = null;
String hostSeperator = Constants.DOUBLE_SLASH;
if (DbType.ORACLE.equals(dataSource.getType())) {
connectType = ((OracleDataSource) datasourceForm).getConnectType();
if (DbConnectType.ORACLE_SID.equals(connectType)) {
hostSeperator = Constants.AT_SIGN;
}
}
String database = datasourceForm.getDatabase();
// jdbc connection params
String other = datasourceForm.getOther();
String address = datasourceForm.getAddress();
String[] hostsPorts = getHostsAndPort(address, hostSeperator);
// ip host
String host = hostsPorts[0];
// prot
String port = hostsPorts[1];
String separator = "";
switch (dataSource.getType()) {
case HIVE:
case SQLSERVER:
separator = ";";
break;
case MYSQL:
case POSTGRESQL:
case CLICKHOUSE:
case ORACLE:
case PRESTO:
separator = "&";
break;
default:
separator = "&";
break;
}
Map<String, String> otherMap = new LinkedHashMap<String, String>();
if (other != null) {
String[] configs = other.split(separator);
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
}
Map<String, Object> map = new HashMap<>(10);
map.put(NAME, dataSourceName);
map.put(NOTE, desc);
map.put(TYPE, dataSourceType);
if (connectType != null) {
map.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
}
map.put(HOST, host);
map.put(PORT, port);
map.put(PRINCIPAL, datasourceForm.getPrincipal());
map.put(Constants.KERBEROS_KRB5_CONF_PATH, datasourceForm.getJavaSecurityKrb5Conf());
map.put(Constants.KERBEROS_KEY_TAB_USERNAME, datasourceForm.getLoginUserKeytabUsername());
map.put(Constants.KERBEROS_KEY_TAB_PATH, datasourceForm.getLoginUserKeytabPath());
map.put(DATABASE, database);
map.put(USER_NAME, datasourceForm.getUser());
map.put(OTHER, otherMap);
result.put(Constants.DATA_LIST, map);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * query datasource list by keyword
@@ -287,44 +71,7 @@ public class DataSourceService extends BaseService {
 * @param pageSize page size
 * @return data source list page
 */
-public Map<String, Object> queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
+Map<String, Object> queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize);
Map<String, Object> result = new HashMap<>();
IPage<DataSource> dataSourceList = null;
Page<DataSource> dataSourcePage = new Page(pageNo, pageSize);
if (isAdmin(loginUser)) {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
} else {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal);
}
List<DataSource> dataSources = dataSourceList != null ? dataSourceList.getRecords() : new ArrayList<>();
handlePasswd(dataSources);
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
pageInfo.setTotalCount((int) (dataSourceList != null ? dataSourceList.getTotal() : 0L));
pageInfo.setLists(dataSources);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* handle datasource connection password for safety
*
* @param dataSourceList
*/
private void handlePasswd(List<DataSource> dataSourceList) {
for (DataSource dataSource : dataSourceList) {
String connectionParams = dataSource.getConnectionParams();
ObjectNode object = JSONUtils.parseObject(connectionParams);
object.put(Constants.PASSWORD, Constants.XXXXXX);
dataSource.setConnectionParams(object.toString());
}
}
/**
 * query data resource list
@@ -333,22 +80,7 @@ public class DataSourceService extends BaseService {
 * @param type data source type
 * @return data source list page
 */
-public Map<String, Object> queryDataSourceList(User loginUser, Integer type) {
+Map<String, Object> queryDataSourceList(User loginUser, Integer type);
Map<String, Object> result = new HashMap<>();
List<DataSource> datasourceList;
if (isAdmin(loginUser)) {
datasourceList = dataSourceMapper.listAllDataSourceByType(type);
} else {
datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type);
}
result.put(Constants.DATA_LIST, datasourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * verify datasource exists
@@ -356,18 +88,7 @@ public class DataSourceService extends BaseService {
 * @param name datasource name
 * @return true if data datasource not exists, otherwise return false
 */
-public Result<Object> verifyDataSourceName(String name) {
+Result<Object> verifyDataSourceName(String name);
Result<Object> result = new Result<>();
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name);
if (dataSourceList != null && dataSourceList.size() > 0) {
logger.error("datasource name:{} has exist, can't create again.", name);
putMsg(result, Status.DATASOURCE_EXIST);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
 * check connection
@@ -376,25 +97,7 @@ public class DataSourceService extends BaseService {
 * @param parameter data source parameters
 * @return true if connect successfully, otherwise false
 */
-public Result<Object> checkConnection(DbType type, String parameter) {
+Result<Object> checkConnection(DbType type, String parameter);
Result<Object> result = new Result<>();
BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter);
if (datasource == null) {
putMsg(result, Status.DATASOURCE_TYPE_NOT_EXIST, type);
return result;
}
try (Connection connection = datasource.getConnection()) {
if (connection == null) {
putMsg(result, Status.CONNECTION_TEST_FAILURE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
} catch (Exception e) {
logger.error("datasource test connection error, dbType:{}, jdbcUrl:{}, message:{}.", type, datasource.getJdbcUrl(), e.getMessage());
return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(),e.getMessage());
}
}
/**
 * test connection
@@ -402,15 +105,7 @@ public class DataSourceService extends BaseService {
 * @param id datasource id
 * @return connect result code
 */
-public Result<Object> connectionTest(int id) {
+Result<Object> connectionTest(int id);
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
Result<Object> result = new Result<>();
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
return checkConnection(dataSource.getType(), dataSource.getConnectionParams());
}
/**
 * build paramters
@@ -425,116 +120,10 @@ public class DataSourceService extends BaseService {
 * @param principal principal
 * @return datasource parameter
 */
-public String buildParameter(DbType type, String host,
-String port, String database, String principal, String userName,
-String password, DbConnectType connectType, String other,
-String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) {
+String buildParameter(DbType type, String host,
+String port, String database, String principal, String userName,
+String password, DbConnectType connectType, String other,
+String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath);
String address = buildAddress(type, host, port, connectType);
Map<String, Object> parameterMap = new LinkedHashMap<String, Object>(6);
String jdbcUrl;
if (DbType.SQLSERVER == type) {
jdbcUrl = address + ";databaseName=" + database;
} else {
jdbcUrl = address + "/" + database;
}
if (Constants.ORACLE.equals(type.name())) {
parameterMap.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
}
if (CommonUtils.getKerberosStartupState()
&& (type == DbType.HIVE || type == DbType.SPARK)) {
jdbcUrl += ";principal=" + principal;
}
String separator = "";
if (Constants.MYSQL.equals(type.name())
|| Constants.POSTGRESQL.equals(type.name())
|| Constants.CLICKHOUSE.equals(type.name())
|| Constants.ORACLE.equals(type.name())
|| Constants.PRESTO.equals(type.name())) {
separator = "&";
} else if (Constants.HIVE.equals(type.name())
|| Constants.SPARK.equals(type.name())
|| Constants.DB2.equals(type.name())
|| Constants.SQLSERVER.equals(type.name())) {
separator = ";";
}
parameterMap.put(TYPE, connectType);
parameterMap.put(Constants.ADDRESS, address);
parameterMap.put(Constants.DATABASE, database);
parameterMap.put(Constants.JDBC_URL, jdbcUrl);
parameterMap.put(Constants.USER, userName);
parameterMap.put(Constants.PASSWORD, CommonUtils.encodePassword(password));
if (CommonUtils.getKerberosStartupState()
&& (type == DbType.HIVE || type == DbType.SPARK)) {
parameterMap.put(Constants.PRINCIPAL, principal);
parameterMap.put(Constants.KERBEROS_KRB5_CONF_PATH, javaSecurityKrb5Conf);
parameterMap.put(Constants.KERBEROS_KEY_TAB_USERNAME, loginUserKeytabUsername);
parameterMap.put(Constants.KERBEROS_KEY_TAB_PATH, loginUserKeytabPath);
}
Map<String, String> map = JSONUtils.toMap(other);
if (map != null) {
StringBuilder otherSb = new StringBuilder();
for (Map.Entry<String, String> entry: map.entrySet()) {
otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator));
}
if (!Constants.DB2.equals(type.name())) {
otherSb.deleteCharAt(otherSb.length() - 1);
}
parameterMap.put(Constants.OTHER, otherSb);
}
if (logger.isDebugEnabled()) {
logger.info("parameters map:{}", JSONUtils.toJsonString(parameterMap));
}
return JSONUtils.toJsonString(parameterMap);
}
private String buildAddress(DbType type, String host, String port, DbConnectType connectType) {
StringBuilder sb = new StringBuilder();
if (Constants.MYSQL.equals(type.name())) {
sb.append(Constants.JDBC_MYSQL);
sb.append(host).append(":").append(port);
} else if (Constants.POSTGRESQL.equals(type.name())) {
sb.append(Constants.JDBC_POSTGRESQL);
sb.append(host).append(":").append(port);
} else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) {
sb.append(Constants.JDBC_HIVE_2);
String[] hostArray = host.split(",");
if (hostArray.length > 0) {
for (String zkHost : hostArray) {
sb.append(String.format("%s:%s,", zkHost, port));
}
sb.deleteCharAt(sb.length() - 1);
}
} else if (Constants.CLICKHOUSE.equals(type.name())) {
sb.append(Constants.JDBC_CLICKHOUSE);
sb.append(host).append(":").append(port);
} else if (Constants.ORACLE.equals(type.name())) {
if (connectType == DbConnectType.ORACLE_SID) {
sb.append(Constants.JDBC_ORACLE_SID);
} else {
sb.append(Constants.JDBC_ORACLE_SERVICE_NAME);
}
sb.append(host).append(":").append(port);
} else if (Constants.SQLSERVER.equals(type.name())) {
sb.append(Constants.JDBC_SQLSERVER);
sb.append(host).append(":").append(port);
} else if (Constants.DB2.equals(type.name())) {
sb.append(Constants.JDBC_DB2);
sb.append(host).append(":").append(port);
} else if (Constants.PRESTO.equals(type.name())) {
sb.append(Constants.JDBC_PRESTO);
sb.append(host).append(":").append(port);
}
return sb.toString();
}
/**
 * delete datasource
@@ -543,30 +132,7 @@ public class DataSourceService extends BaseService {
 * @param datasourceId data source id
 * @return delete result code
 */
-@Transactional(rollbackFor = RuntimeException.class)
-public Result<Object> delete(User loginUser, int datasourceId) {
+Result<Object> delete(User loginUser, int datasourceId);
Result<Object> result = new Result<>();
try {
//query datasource by id
DataSource dataSource = dataSourceMapper.selectById(datasourceId);
if (dataSource == null) {
logger.error("resource id {} not exist", datasourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, dataSource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
dataSourceMapper.deleteById(datasourceId);
datasourceUserMapper.deleteByDatasourceId(datasourceId);
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
logger.error("delete datasource error", e);
throw new RuntimeException("delete datasource error");
}
return result;
}
/**
 * unauthorized datasource
@@ -575,38 +141,7 @@ public class DataSourceService extends BaseService {
 * @param userId user id
 * @return unauthed data source result code
 */
-public Map<String, Object> unauthDatasource(User loginUser, Integer userId) {
+Map<String, Object> unauthDatasource(User loginUser, Integer userId);
Map<String, Object> result = new HashMap<>();
//only admin operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
/**
* query all data sources except userId
*/
List<DataSource> resultList = new ArrayList<>();
List<DataSource> datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId);
Set<DataSource> datasourceSet = null;
if (datasourceList != null && datasourceList.size() > 0) {
datasourceSet = new HashSet<>(datasourceList);
List<DataSource> authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId);
Set<DataSource> authedDataSourceSet = null;
if (authedDataSourceList != null && authedDataSourceList.size() > 0) {
authedDataSourceSet = new HashSet<>(authedDataSourceList);
datasourceSet.removeAll(authedDataSourceSet);
}
resultList = new ArrayList<>(datasourceSet);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * authorized datasource
@@ -615,50 +150,5 @@ public class DataSourceService extends BaseService {
 * @param userId user id
 * @return authorized result code
 */
-public Map<String, Object> authedDatasource(User loginUser, Integer userId) {
+Map<String, Object> authedDatasource(User loginUser, Integer userId);
Map<String, Object> result = new HashMap<>();
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<DataSource> authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId);
result.put(Constants.DATA_LIST, authedDatasourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get host and port by address
*
* @param address address
* @return sting array: [host,port]
*/
private String[] getHostsAndPort(String address) {
return getHostsAndPort(address, Constants.DOUBLE_SLASH);
}
/**
* get host and port by address
*
* @param address address
* @param separator separator
* @return sting array: [host,port]
*/
private String[] getHostsAndPort(String address, String separator) {
String[] result = new String[2];
String[] tmpArray = address.split(separator);
String hostsAndPorts = tmpArray[tmpArray.length - 1];
StringBuilder hosts = new StringBuilder();
String[] hostPortArray = hostsAndPorts.split(Constants.COMMA);
String port = hostPortArray[0].split(Constants.COLON)[1];
for (String hostPort : hostPortArray) {
hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA);
}
hosts.deleteCharAt(hosts.length() - 1);
result[0] = hosts.toString();
result[1] = port;
return result;
}
}
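For orientation, here is a sketch of how a caller could combine the new interface methods (connection check first, then create), using only the signatures declared above. The wrapper class, its name, and the constructor wiring are assumptions for illustration, not code from this merge.

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.DataSourceService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.dao.entity.User;

// Hypothetical caller; only the DataSourceService methods themselves come from the diff above.
public class DataSourceFacade {

    private final DataSourceService dataSourceService;

    public DataSourceFacade(DataSourceService dataSourceService) {
        this.dataSourceService = dataSourceService;
    }

    // Verify connectivity first, then persist the datasource definition.
    public Result<Object> testAndCreate(User loginUser, String name, String note, DbType type, String parameter) {
        Result<Object> connection = dataSourceService.checkConnection(type, parameter);
        if (Status.SUCCESS.getCode() != connection.getCode()) {
            return connection;
        }
        return dataSourceService.createDataSource(loginUser, name, note, type, parameter);
    }
}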

509
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java

@@ -17,82 +17,22 @@
package org.apache.dolphinscheduler.api.service;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT;
import org.apache.dolphinscheduler.api.enums.ExecuteType;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.RunMode;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * executor service
 */
-@Service
-public class ExecutorService extends BaseService {
+public interface ExecutorService {
private static final Logger logger = LoggerFactory.getLogger(ExecutorService.class);
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private MonitorService monitorService;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private ProcessService processService;
/**
 * execute process instance
@@ -113,80 +53,14 @@ public class ExecutorService extends BaseService {
 * @param timeout timeout
 * @param startParams the global param values which pass to new process instance
 * @return execute process instance code
- * @throws ParseException Parse Exception
 */
public Map<String, Object> execProcessInstance(User loginUser, String projectName,
int processDefinitionId, String cronTime, CommandType commandType,
FailureStrategy failureStrategy, String startNodeList,
TaskDependType taskDependType, WarningType warningType, int warningGroupId,
RunMode runMode,
Priority processInstancePriority, String workerGroup, Integer timeout,
Map<String, String> startParams) throws ParseException {
Map<String, Object> result = new HashMap<>();
// timeout is invalid
if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) {
putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR);
return result;
}
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project);
if (checkResultAndAuth != null) {
return checkResultAndAuth;
}
// check process define release state
ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId);
result = checkProcessDefinitionValid(processDefinition, processDefinitionId);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (!checkTenantSuitable(processDefinition)) {
logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ",
processDefinition.getId(), processDefinition.getName());
putMsg(result, Status.TENANT_NOT_SUITABLE);
return result;
}
// check master exists
if (!checkMasterExists(result)) {
return result;
}
/**
* create command
*/
int create = this.createCommand(commandType, processDefinitionId,
taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(),
warningGroupId, runMode, processInstancePriority, workerGroup, startParams);
if (create > 0) {
processDefinition.setWarningGroupId(warningGroupId);
processDefinitionMapper.updateById(processDefinition);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.START_PROCESS_INSTANCE_ERROR);
}
return result;
}
+Map<String, Object> execProcessInstance(User loginUser, String projectName,
+int processDefinitionId, String cronTime, CommandType commandType,
+FailureStrategy failureStrategy, String startNodeList,
+TaskDependType taskDependType, WarningType warningType, int warningGroupId,
+RunMode runMode,
+Priority processInstancePriority, String workerGroup, Integer timeout,
+Map<String, String> startParams);
-/**
- * check whether master exists
- *
- * @param result result
- * @return master exists return true , otherwise return false
- */
-private boolean checkMasterExists(Map<String, Object> result) {
-// check master server exists
-List<Server> masterServers = monitorService.getServerListFromZK(true);
-// no master
-if (masterServers.size() == 0) {
-putMsg(result, Status.MASTER_NOT_EXISTS);
-return false;
-}
-return true;
-}
/**
 * check whether the process definition can be executed
@@ -195,19 +69,7 @@ public class ExecutorService extends BaseService {
 * @param processDefineId process definition id
 * @return check result code
 */
-public Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId) {
+Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId);
Map<String, Object> result = new HashMap<>();
if (processDefinition == null) {
// check process definition exists
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId);
} else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
// check process definition online
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefineId);
} else {
result.put(Constants.STATUS, Status.SUCCESS);
}
return result;
}
/**
 * do action to process instance: pause, stop, repeat, recover from pause, recover from stop
@@ -218,194 +80,7 @@ public class ExecutorService extends BaseService {
 * @param executeType execute type
 * @return execute result code
 */
-public Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) {
+Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = checkResultAndAuth(loginUser, projectName, project);
if (checkResult != null) {
return checkResult;
}
// check master exists
if (!checkMasterExists(result)) {
return result;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (processInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) {
result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId());
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
}
checkResult = checkExecuteType(processInstance, executeType);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
if (!checkTenantSuitable(processDefinition)) {
logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ",
processDefinition.getId(), processDefinition.getName());
putMsg(result, Status.TENANT_NOT_SUITABLE);
}
switch (executeType) {
case REPEAT_RUNNING:
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING);
break;
case RECOVER_SUSPENDED_PROCESS:
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS);
break;
case START_FAILURE_TASK_PROCESS:
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS);
break;
case STOP:
if (processInstance.getState() == ExecutionStatus.READY_STOP) {
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState());
} else {
result = updateProcessInstancePrepare(processInstance, CommandType.STOP, ExecutionStatus.READY_STOP);
}
break;
case PAUSE:
if (processInstance.getState() == ExecutionStatus.READY_PAUSE) {
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState());
} else {
result = updateProcessInstancePrepare(processInstance, CommandType.PAUSE, ExecutionStatus.READY_PAUSE);
}
break;
default:
logger.error("unknown execute type : {}", executeType);
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type");
break;
}
return result;
}
/**
* check tenant suitable
*
* @param processDefinition process definition
* @return true if tenant suitable, otherwise return false
*/
private boolean checkTenantSuitable(ProcessDefinition processDefinition) {
// checkTenantExists();
Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(),
processDefinition.getUserId());
return tenant != null;
}
/**
* Check the state of process instance and the type of operation match
*
* @param processInstance process instance
* @param executeType execute type
* @return check result code
*/
private Map<String, Object> checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) {
Map<String, Object> result = new HashMap<>();
ExecutionStatus executionStatus = processInstance.getState();
boolean checkResult = false;
switch (executeType) {
case PAUSE:
case STOP:
if (executionStatus.typeIsRunning()) {
checkResult = true;
}
break;
case REPEAT_RUNNING:
if (executionStatus.typeIsFinished()) {
checkResult = true;
}
break;
case START_FAILURE_TASK_PROCESS:
if (executionStatus.typeIsFailure()) {
checkResult = true;
}
break;
case RECOVER_SUSPENDED_PROCESS:
if (executionStatus.typeIsPause() || executionStatus.typeIsCancel()) {
checkResult = true;
}
break;
default:
break;
}
if (!checkResult) {
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString());
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* prepare to update process instance command type and status
*
* @param processInstance process instance
* @param commandType command type
* @param executionStatus execute status
* @return update result
*/
private Map<String, Object> updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) {
Map<String, Object> result = new HashMap<>();
processInstance.setCommandType(commandType);
processInstance.addHistoryCmd(commandType);
processInstance.setState(executionStatus);
int update = processService.updateProcessInstance(processInstance);
// determine whether the process is normal
if (update > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
* insert command, used in the implementation of the page, re run, recovery (pause / failure) execution
*
* @param loginUser login user
* @param instanceId instance id
* @param processDefinitionId process definition id
* @param commandType command type
* @return insert result code
*/
private Map<String, Object> insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) {
Map<String, Object> result = new HashMap<>();
Command command = new Command();
command.setCommandType(commandType);
command.setProcessDefinitionId(processDefinitionId);
command.setCommandParam(String.format("{\"%s\":%d}",
CMD_PARAM_RECOVER_PROCESS_ID_STRING, instanceId));
command.setExecutorId(loginUser.getId());
if (!processService.verifyIsNeedCreateCommand(command)) {
putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, processDefinitionId);
return result;
}
int create = processService.createCommand(command);
if (create > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
 * check if sub processes are offline before starting process definition
@@ -413,167 +88,5 @@ public class ExecutorService extends BaseService {
 * @param processDefineId process definition id
 * @return check result code
 */
-public Map<String, Object> startCheckByProcessDefinedId(int processDefineId) {
+Map<String, Object> startCheckByProcessDefinedId(int processDefineId);
Map<String, Object> result = new HashMap<>();
if (processDefineId == 0) {
logger.error("process definition id is null");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "process definition id");
}
List<Integer> ids = new ArrayList<>();
processService.recurseFindSubProcessId(processDefineId, ids);
Integer[] idArray = ids.toArray(new Integer[ids.size()]);
if (!ids.isEmpty()) {
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray);
if (processDefinitionList != null) {
for (ProcessDefinition processDefinition : processDefinitionList) {
/**
* if there is no online process, exit directly
*/
if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName());
logger.info("not release process definition id: {} , name : {}",
processDefinition.getId(), processDefinition.getName());
return result;
}
}
}
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* create command
*
* @param commandType commandType
* @param processDefineId processDefineId
* @param nodeDep nodeDep
* @param failureStrategy failureStrategy
* @param startNodeList startNodeList
* @param schedule schedule
* @param warningType warningType
* @param executorId executorId
* @param warningGroupId warningGroupId
* @param runMode runMode
* @param processInstancePriority processInstancePriority
* @param workerGroup workerGroup
* @return command id
*/
private int createCommand(CommandType commandType, int processDefineId,
TaskDependType nodeDep, FailureStrategy failureStrategy,
String startNodeList, String schedule, WarningType warningType,
int executorId, int warningGroupId,
RunMode runMode, Priority processInstancePriority, String workerGroup,
Map<String, String> startParams) throws ParseException {
/**
* instantiate command schedule instance
*/
Command command = new Command();
Map<String, String> cmdParam = new HashMap<>();
if (commandType == null) {
command.setCommandType(CommandType.START_PROCESS);
} else {
command.setCommandType(commandType);
}
command.setProcessDefinitionId(processDefineId);
if (nodeDep != null) {
command.setTaskDependType(nodeDep);
}
if (failureStrategy != null) {
command.setFailureStrategy(failureStrategy);
}
if (StringUtils.isNotEmpty(startNodeList)) {
cmdParam.put(CMD_PARAM_START_NODE_NAMES, startNodeList);
}
if (warningType != null) {
command.setWarningType(warningType);
}
if (startParams != null && startParams.size() > 0) {
cmdParam.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParams));
}
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
command.setExecutorId(executorId);
command.setWarningGroupId(warningGroupId);
command.setProcessInstancePriority(processInstancePriority);
command.setWorkerGroup(workerGroup);
Date start = null;
Date end = null;
if (StringUtils.isNotEmpty(schedule)) {
String[] interval = schedule.split(",");
if (interval.length == 2) {
start = DateUtils.getScheduleDate(interval[0]);
end = DateUtils.getScheduleDate(interval[1]);
}
}
// determine whether to complement
if (commandType == CommandType.COMPLEMENT_DATA) {
runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode;
if (null != start && null != end && !start.after(end)) {
if (runMode == RunMode.RUN_MODE_SERIAL) {
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
return processService.createCommand(command);
} else if (runMode == RunMode.RUN_MODE_PARALLEL) {
List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId);
List<Date> listDate = new LinkedList<>();
if (!CollectionUtils.isEmpty(schedules)) {
for (Schedule item : schedules) {
listDate.addAll(CronUtils.getSelfFireDateList(start, end, item.getCrontab()));
}
}
if (!CollectionUtils.isEmpty(listDate)) {
// loop by schedule date
for (Date date : listDate) {
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(date));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
processService.createCommand(command);
}
return listDate.size();
} else {
// loop by day
int runCunt = 0;
while (!start.after(end)) {
runCunt += 1;
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
processService.createCommand(command);
start = DateUtils.getSomeDay(start, 1);
}
return runCunt;
}
}
} else {
logger.error("there is not valid schedule date for the process definition: id:{},date:{}",
processDefineId, schedule);
}
} else {
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
return processService.createCommand(command);
}
return 0;
}
/**
* check result and auth
*/
private Map<String, Object> checkResultAndAuth(User loginUser, String projectName, Project project) {
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
return null;
}
}
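The execute/start methods keep the Map-based result convention documented in the removed code above (a Status value stored under Constants.STATUS). A small illustrative caller, assuming that convention; the wrapper class below is hypothetical and not part of this merge.

import java.util.Map;

import org.apache.dolphinscheduler.api.enums.ExecuteType;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.User;

// Hypothetical caller; names are illustrative only.
public class ProcessInstanceOperator {

    private final ExecutorService executorService;

    public ProcessInstanceOperator(ExecutorService executorService) {
        this.executorService = executorService;
    }

    // Ask the scheduler to stop a running process instance and report whether the command was accepted.
    public boolean stop(User loginUser, String projectName, int processInstanceId) {
        Map<String, Object> result = executorService.execute(loginUser, projectName, processInstanceId, ExecuteType.STOP);
        // the service puts a Status value under Constants.STATUS, as the removed implementation shows
        return result.get(Constants.STATUS) == Status.SUCCESS;
    }
}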

3
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java

@@ -14,12 +14,13 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.utils.Result;
/**
- * log service
+ * logger service
 */
public interface LoggerService {

168
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java

@@ -14,143 +14,51 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;
-import static org.apache.dolphinscheduler.common.utils.Preconditions.checkNotNull;
+import org.apache.dolphinscheduler.common.model.Server;
+import org.apache.dolphinscheduler.dao.entity.User;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.ZookeeperMonitor;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ZKNodeType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.model.WorkerServerModel;
import org.apache.dolphinscheduler.dao.MonitorDBDao;
import org.apache.dolphinscheduler.dao.entity.MonitorRecord;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.google.common.collect.Sets;
/**
 * monitor service
 */
-@Service
-public class MonitorService extends BaseService {
+public interface MonitorService {
+/**
+ * query database state
+ *
+ * @param loginUser login user
+ * @return data base state
+ */
+Map<String,Object> queryDatabaseState(User loginUser);
+/**
+ * query master list
+ *
+ * @param loginUser login user
+ * @return master information list
+ */
+Map<String,Object> queryMaster(User loginUser);
+/**
+ * query zookeeper state
+ *
+ * @param loginUser login user
+ * @return zookeeper information list
+ */
+Map<String,Object> queryZookeeperState(User loginUser);
+/**
+ * query worker list
+ *
+ * @param loginUser login user
+ * @return worker information list
+ */
+Map<String,Object> queryWorker(User loginUser);
+List<Server> getServerListFromZK(boolean isMaster);
-@Autowired
-private ZookeeperMonitor zookeeperMonitor;
-@Autowired
-private MonitorDBDao monitorDBDao;
-/**
- * query database state
- *
- * @param loginUser login user
- * @return data base state
- */
-public Map<String,Object> queryDatabaseState(User loginUser) {
-Map<String, Object> result = new HashMap<>();
-List<MonitorRecord> monitorRecordList = monitorDBDao.queryDatabaseState();
-result.put(Constants.DATA_LIST, monitorRecordList);
-putMsg(result, Status.SUCCESS);
-return result;
-}
-/**
- * query master list
- *
- * @param loginUser login user
- * @return master information list
- */
-public Map<String,Object> queryMaster(User loginUser) {
-Map<String, Object> result = new HashMap<>();
List<Server> masterServers = getServerListFromZK(true);
result.put(Constants.DATA_LIST, masterServers);
putMsg(result,Status.SUCCESS);
return result;
}
/**
* query zookeeper state
*
* @param loginUser login user
* @return zookeeper information list
*/
public Map<String,Object> queryZookeeperState(User loginUser) {
Map<String, Object> result = new HashMap<>();
List<ZookeeperRecord> zookeeperRecordList = zookeeperMonitor.zookeeperInfoList();
result.put(Constants.DATA_LIST, zookeeperRecordList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query worker list
*
* @param loginUser login user
* @return worker information list
*/
public Map<String,Object> queryWorker(User loginUser) {
Map<String, Object> result = new HashMap<>();
List<WorkerServerModel> workerServers = getServerListFromZK(false)
.stream()
.map((Server server) -> {
WorkerServerModel model = new WorkerServerModel();
model.setId(server.getId());
model.setHost(server.getHost());
model.setPort(server.getPort());
model.setZkDirectories(Sets.newHashSet(server.getZkDirectory()));
model.setResInfo(server.getResInfo());
model.setCreateTime(server.getCreateTime());
model.setLastHeartbeatTime(server.getLastHeartbeatTime());
return model;
})
.collect(Collectors.toList());
Map<String, WorkerServerModel> workerHostPortServerMapping = workerServers
.stream()
.collect(Collectors.toMap(
(WorkerServerModel worker) -> {
String[] s = worker.getZkDirectories().iterator().next().split("/");
return s[s.length - 1];
}
, Function.identity()
, (WorkerServerModel oldOne, WorkerServerModel newOne) -> {
oldOne.getZkDirectories().addAll(newOne.getZkDirectories());
return oldOne;
}));
result.put(Constants.DATA_LIST, workerHostPortServerMapping.values());
putMsg(result,Status.SUCCESS);
return result;
}
public List<Server> getServerListFromZK(boolean isMaster) {
checkNotNull(zookeeperMonitor);
ZKNodeType zkNodeType = isMaster ? ZKNodeType.MASTER : ZKNodeType.WORKER;
return zookeeperMonitor.getServersList(zkNodeType);
}
}
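getServerListFromZK(true) is how the removed ExecutorService code above checked for a live master; the same call is now exposed through the MonitorService interface. A minimal, hypothetical helper using it (not part of this merge):

import java.util.List;

import org.apache.dolphinscheduler.api.service.MonitorService;
import org.apache.dolphinscheduler.common.model.Server;

// Hypothetical helper; class name and wiring are illustrative only.
public class MasterAvailabilityChecker {

    private final MonitorService monitorService;

    public MasterAvailabilityChecker(MonitorService monitorService) {
        this.monitorService = monitorService;
    }

    // True if at least one master server is registered in ZooKeeper.
    public boolean hasActiveMaster() {
        List<Server> masters = monitorService.getServerListFromZK(true);
        return masters != null && !masters.isEmpty();
    }
}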

3
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java

@@ -23,6 +23,9 @@ import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
+/**
+ * process definition version service
+ */
public interface ProcessDefinitionVersionService {
/**

610
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java

@@ -17,157 +17,26 @@
package org.apache.dolphinscheduler.api.service;
import static org.apache.dolphinscheduler.common.Constants.DATA_LIST;
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT;
import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE;
import static org.apache.dolphinscheduler.common.Constants.TASK_LIST;
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
import org.apache.dolphinscheduler.api.dto.gantt.Task;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
 * process instance service
 */
-@Service
-public class ProcessInstanceService extends BaseService {
+public interface ProcessInstanceService {
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceService.class);
@Autowired
ProjectMapper projectMapper;
@Autowired
ProjectService projectService;
@Autowired
ProcessService processService;
@Autowired
ProcessInstanceMapper processInstanceMapper;
@Autowired
ProcessDefinitionMapper processDefineMapper;
@Autowired
ProcessDefinitionService processDefinitionService;
@Autowired
ProcessDefinitionVersionService processDefinitionVersionService;
@Autowired
ExecutorService execService;
@Autowired
TaskInstanceMapper taskInstanceMapper;
@Autowired
LoggerService loggerService;
@Autowired
UsersService usersService;
/**
 * return top n SUCCESS process instance order by running time which started between startTime and endTime
 */
-public Map<String, Object> queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime) {
+Map<String, Object> queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
if (0 > size) {
putMsg(result, Status.NEGTIVE_SIZE_NUMBER_ERROR, size);
return result;
}
if (Objects.isNull(startTime)) {
putMsg(result, Status.DATA_IS_NULL, Constants.START_TIME);
return result;
}
Date start = DateUtils.stringToDate(startTime);
if (Objects.isNull(endTime)) {
putMsg(result, Status.DATA_IS_NULL, Constants.END_TIME);
return result;
}
Date end = DateUtils.stringToDate(endTime);
if (start == null || end == null) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate");
return result;
}
if (start.getTime() > end.getTime()) {
putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR, startTime, endTime);
return result;
}
List<ProcessInstance> processInstances = processInstanceMapper.queryTopNProcessInstance(size, start, end, ExecutionStatus.SUCCESS);
result.put(DATA_LIST, processInstances);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * query process instance by id
@@ -177,24 +46,7 @@ public class ProcessInstanceService extends BaseService {
 * @param processId process instance id
 * @return process instance detail
 */
-public Map<String, Object> queryProcessInstanceById(User loginUser, String projectName, Integer processId) {
+Map<String, Object> queryProcessInstanceById(User loginUser, String projectName, Integer processId);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
processInstance.setWarningGroupId(processDefinition.getWarningGroupId());
result.put(DATA_LIST, processInstance);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * paging query process instance list, filtering according to project, process definition, time range, keyword, process status
@@ -211,64 +63,10 @@ public class ProcessInstanceService extends BaseService {
 * @param endDate end time
 * @return process instance list
 */
public Map<String, Object> queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId, Map<String, Object> queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId,
String startDate, String endDate, String startDate, String endDate,
String searchVal, String executorName, ExecutionStatus stateType, String host, String searchVal, String executorName, ExecutionStatus stateType, String host,
Integer pageNo, Integer pageSize) { Integer pageNo, Integer pageSize);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
int[] statusArray = null;
// filter by state
if (stateType != null) {
statusArray = new int[]{stateType.ordinal()};
}
Date start = null;
Date end = null;
try {
if (StringUtils.isNotEmpty(startDate)) {
start = DateUtils.getScheduleDate(startDate);
}
if (StringUtils.isNotEmpty(endDate)) {
end = DateUtils.getScheduleDate(endDate);
}
} catch (Exception e) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate");
return result;
}
Page<ProcessInstance> page = new Page<>(pageNo, pageSize);
PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize);
int executorId = usersService.getUserIdByName(executorName);
IPage<ProcessInstance> processInstanceList =
processInstanceMapper.queryProcessInstanceListPaging(page,
project.getId(), processDefineId, searchVal, executorId, statusArray, host, start, end);
List<ProcessInstance> processInstances = processInstanceList.getRecords();
for (ProcessInstance processInstance : processInstances) {
processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime()));
User executor = usersService.queryUser(processInstance.getExecutorId());
if (null != executor) {
processInstance.setExecutorName(executor.getUserName());
}
}
pageInfo.setTotalCount((int) processInstanceList.getTotal());
pageInfo.setLists(processInstances);
result.put(DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * query task list by process instance id
@@ -279,71 +77,9 @@ public class ProcessInstanceService extends BaseService {
 * @return task list for the process instance
 * @throws IOException io exception
 */
public Map<String, Object> queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException { Map<String, Object> queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException;
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processId);
addDependResultForTaskList(taskInstanceList);
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString());
resultMap.put(TASK_LIST, taskInstanceList);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* add dependent result for dependent task
*/
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException {
for (TaskInstance taskInstance : taskInstanceList) {
if (taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) {
Result<String> logResult = loggerService.queryLog(
taskInstance.getId(), 0, 4098);
if (logResult.getCode() == Status.SUCCESS.ordinal()) {
String log = logResult.getData();
Map<String, DependResult> resultMap = parseLogForDependentResult(log);
taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap));
}
}
}
}
public Map<String, DependResult> parseLogForDependentResult(String log) throws IOException { Map<String, DependResult> parseLogForDependentResult(String log) throws IOException;
Map<String, DependResult> resultMap = new HashMap<>();
if (StringUtils.isEmpty(log)) {
return resultMap;
}
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
String line;
while ((line = br.readLine()) != null) {
if (line.contains(DEPENDENT_SPLIT)) {
String[] tmpStringArray = line.split(":\\|\\|");
if (tmpStringArray.length != 2) {
continue;
}
String dependResultString = tmpStringArray[1];
String[] dependStringArray = dependResultString.split(",");
if (dependStringArray.length != 2) {
continue;
}
String key = dependStringArray[0].trim();
DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim());
resultMap.put(key, dependResult);
}
}
return resultMap;
}
/**
 * query sub process instance detail info by task id
@@ -353,38 +89,7 @@ public class ProcessInstanceService extends BaseService {
 * @param taskId task id
 * @return sub process instance detail
 */
public Map<String, Object> querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId) { Map<String, Object> querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
TaskInstance taskInstance = processService.findTaskInstanceById(taskId);
if (taskInstance == null) {
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId);
return result;
}
if (!taskInstance.isSubProcess()) {
putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName());
return result;
}
ProcessInstance subWorkflowInstance = processService.findSubProcessInstance(
taskInstance.getProcessInstanceId(), taskInstance.getId());
if (subWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId);
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put("subProcessInstanceId", subWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * update process instance
@@ -401,97 +106,9 @@ public class ProcessInstanceService extends BaseService {
 * @return update result code
 * @throws ParseException parse exception for json parse
 */
public Map<String, Object> updateProcessInstance(User loginUser, String projectName, Integer processInstanceId, Map<String, Object> updateProcessInstance(User loginUser, String projectName, Integer processInstanceId,
String processInstanceJson, String scheduleTime, Boolean syncDefine, String processInstanceJson, String scheduleTime, Boolean syncDefine,
Flag flag, String locations, String connects) throws ParseException { Flag flag, String locations, String connects) throws ParseException;
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
//check project permission
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
//check process instance exists
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (processInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
//check process instance status
if (!processInstance.getState().typeIsFinished()) {
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR,
processInstance.getName(), processInstance.getState().toString(), "update");
return result;
}
Date schedule = null;
schedule = processInstance.getScheduleTime();
if (scheduleTime != null) {
schedule = DateUtils.getScheduleDate(scheduleTime);
}
processInstance.setScheduleTime(schedule);
processInstance.setLocations(locations);
processInstance.setConnects(connects);
String globalParams = null;
String originDefParams = null;
int timeout = processInstance.getTimeout();
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
if (StringUtils.isNotEmpty(processInstanceJson)) {
ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class);
//check workflow json is valid
Map<String, Object> checkFlowJson = processDefinitionService.checkProcessNodeList(processData, processInstanceJson);
if (checkFlowJson.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
originDefParams = JSONUtils.toJsonString(processData.getGlobalParams());
List<Property> globalParamList = processData.getGlobalParams();
Map<String, String> globalParamMap = Optional.ofNullable(globalParamList).orElse(Collections.emptyList()).stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList,
processInstance.getCmdTypeIfComplement(), schedule);
timeout = processData.getTimeout();
processInstance.setTimeout(timeout);
Tenant tenant = processService.getTenantForProcess(processData.getTenantId(),
processDefinition.getUserId());
if (tenant != null) {
processInstance.setTenantCode(tenant.getTenantCode());
}
// get the processinstancejson before saving,and then save the name and taskid
String oldJson = processInstance.getProcessInstanceJson();
if (StringUtils.isNotEmpty(oldJson)) {
processInstanceJson = processService.changeJson(processData,oldJson);
}
processInstance.setProcessInstanceJson(processInstanceJson);
processInstance.setGlobalParams(globalParams);
}
int update = processService.updateProcessInstance(processInstance);
int updateDefine = 1;
if (Boolean.TRUE.equals(syncDefine)) {
processDefinition.setProcessDefinitionJson(processInstanceJson);
processDefinition.setGlobalParams(originDefParams);
processDefinition.setLocations(locations);
processDefinition.setConnects(connects);
processDefinition.setTimeout(timeout);
processDefinition.setUpdateTime(new Date());
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
processDefinition.setVersion(version);
updateDefine = processDefineMapper.updateById(processDefinition);
}
if (update > 0 && updateDefine > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
 * query parent process instance detail info by sub process instance id
@@ -501,37 +118,7 @@ public class ProcessInstanceService extends BaseService {
 * @param subId sub process id
 * @return parent instance detail
 */
public Map<String, Object> queryParentInstanceBySubId(User loginUser, String projectName, Integer subId) { Map<String, Object> queryParentInstanceBySubId(User loginUser, String projectName, Integer subId);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId);
if (subInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId);
return result;
}
if (subInstance.getIsSubProcess() == Flag.NO) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName());
return result;
}
ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId);
if (parentWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST);
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put("parentWorkflowInstance", parentWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * delete process instance by id, at the same time, delete task instance and their mapping relation data
@@ -541,38 +128,7 @@ public class ProcessInstanceService extends BaseService {
 * @param processInstanceId process instance id
 * @return delete result code
 */
@Transactional(rollbackFor = RuntimeException.class) Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId);
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (null == processInstance) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
processService.removeTaskLogFile(processInstanceId);
// delete database cascade
int delete = processService.deleteWorkProcessInstanceById(processInstanceId);
processService.deleteAllSubWorkProcessByParentId(processInstanceId);
processService.deleteWorkProcessMapByParentId(processInstanceId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR);
}
return result;
}
/**
 * view process instance variables
@@ -580,71 +136,7 @@ public class ProcessInstanceService extends BaseService {
 * @param processInstanceId process instance id
 * @return variables data
 */
public Map<String, Object> viewVariables(Integer processInstanceId) { Map<String, Object> viewVariables(Integer processInstanceId);
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
throw new RuntimeException("workflow instance is null");
}
Map<String, String> timeParams = BusinessTimeUtils
.getBusinessTime(processInstance.getCmdTypeIfComplement(),
processInstance.getScheduleTime());
String workflowInstanceJson = processInstance.getProcessInstanceJson();
ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class);
String userDefinedParams = processInstance.getGlobalParams();
// global params
List<Property> globalParams = new ArrayList<>();
if (userDefinedParams != null && userDefinedParams.length() > 0) {
globalParams = JSONUtils.toList(userDefinedParams, Property.class);
}
List<TaskNode> taskNodeList = workflowData.getTasks();
// global param string
String globalParamStr = JSONUtils.toJsonString(globalParams);
globalParamStr = ParameterUtils.convertParameterPlaceholders(globalParamStr, timeParams);
globalParams = JSONUtils.toList(globalParamStr, Property.class);
for (Property property : globalParams) {
timeParams.put(property.getProp(), property.getValue());
}
// local params
Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();
for (TaskNode taskNode : taskNodeList) {
String parameter = taskNode.getParams();
Map<String, String> map = JSONUtils.toMap(parameter);
String localParams = map.get(LOCAL_PARAMS);
if (localParams != null && !localParams.isEmpty()) {
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams);
List<Property> localParamsList = JSONUtils.toList(localParams, Property.class);
Map<String, Object> localParamsMap = new HashMap<>();
localParamsMap.put("taskType", taskNode.getType());
localParamsMap.put("localParamsList", localParamsList);
if (CollectionUtils.isNotEmpty(localParamsList)) {
localUserDefParams.put(taskNode.getName(), localParamsMap);
}
}
}
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(GLOBAL_PARAMS, globalParams);
resultMap.put(LOCAL_PARAMS, localUserDefParams);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * encapsulation gantt structure
@@ -653,67 +145,7 @@ public class ProcessInstanceService extends BaseService {
 * @return gantt tree data
 * @throws Exception exception when json parse
 */
public Map<String, Object> viewGantt(Integer processInstanceId) throws Exception { Map<String, Object> viewGantt(Integer processInstanceId) throws Exception;
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
throw new RuntimeException("workflow instance is null");
}
GanttDto ganttDto = new GanttDto();
DAG<String, TaskNode, TaskNodeRelation> dag = processInstance2DAG(processInstance);
//topological sort
List<String> nodeList = dag.topologicalSort();
ganttDto.setTaskNames(nodeList);
List<Task> taskList = new ArrayList<>();
for (String node : nodeList) {
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstanceId, node);
if (taskInstance == null) {
continue;
}
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
Task task = new Task();
task.setTaskName(taskInstance.getName());
task.getStartDate().add(startTime.getTime());
task.getEndDate().add(endTime.getTime());
task.setIsoStart(startTime);
task.setIsoEnd(endTime);
task.setStatus(taskInstance.getState().toString());
task.setExecutionDate(taskInstance.getStartTime());
task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime()));
taskList.add(task);
}
ganttDto.setTasks(taskList);
result.put(DATA_LIST, ganttDto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* process instance to DAG
*
* @param processInstance input process instance
* @return process instance dag.
*/
private static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) {
String processDefinitionJson = processInstance.getProcessInstanceJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
return DagHelper.buildDagGraph(processDag);
}
/**
 * query process instance by processDefinitionId and stateArray
@@ -721,9 +153,7 @@ public class ProcessInstanceService extends BaseService {
 * @param states states array
 * @return process instance list
 */
public List<ProcessInstance> queryByProcessDefineIdAndStatus(int processDefinitionId, int[] states) { List<ProcessInstance> queryByProcessDefineIdAndStatus(int processDefinitionId, int[] states);
return processInstanceMapper.queryByProcessDefineIdAndStatus(processDefinitionId, states);
}
/**
 * query process instance by processDefinitionId
@@ -731,8 +161,6 @@ public class ProcessInstanceService extends BaseService {
 * @param size size
 * @return process instance list
 */
public List<ProcessInstance> queryByProcessDefineId(int processDefinitionId,int size) { List<ProcessInstance> queryByProcessDefineId(int processDefinitionId,int size);
return processInstanceMapper.queryByProcessDefineId(processDefinitionId, size);
}
}
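Note: this hunk strips the @Service annotation and the @Autowired fields and turns ProcessInstanceService into a plain interface; the diff does not show where the removed method bodies go, but the usual pattern is a separate implementation class that keeps the Spring annotation while callers depend only on the interface. A minimal, self-contained sketch of that split, with hypothetical names and no Spring or DolphinScheduler types on the classpath:

import java.util.HashMap;
import java.util.Map;

interface GreetingService {                          // plays the role of ProcessInstanceService
    Map<String, Object> greet(String name);
}

// In the real project the implementation class would carry @Service;
// it is omitted here so the sketch compiles without Spring.
class GreetingServiceImpl implements GreetingService {
    @Override
    public Map<String, Object> greet(String name) {
        Map<String, Object> result = new HashMap<>();
        result.put("msg", "hello " + name);
        return result;
    }
}

public class InterfaceSplitSketch {
    public static void main(String[] args) {
        GreetingService service = new GreetingServiceImpl();   // callers depend only on the interface
        System.out.println(service.greet("dolphinscheduler"));
    }
}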

1
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java

@@ -14,6 +14,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.dao.entity.Project;

229
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java

@@ -14,43 +14,18 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.dao.entity.Queue;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.QueueMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * queue service
 */
@Service public interface QueueService {
public class QueueService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(QueueService.class);
@Autowired
private QueueMapper queueMapper;
@Autowired
private UserMapper userMapper;
/**
 * query queue list
@@ -58,18 +33,7 @@ public class QueueService extends BaseService {
 * @param loginUser login user
 * @return queue list
 */
public Map<String, Object> queryList(User loginUser) { Map<String, Object> queryList(User loginUser);
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
List<Queue> queueList = queueMapper.selectList(null);
result.put(Constants.DATA_LIST, queueList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * query queue list paging
@@ -80,26 +44,7 @@ public class QueueService extends BaseService {
 * @param pageSize page size
 * @return queue list
 */
public Map<String, Object> queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { Map<String, Object> queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize);
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
Page<Queue> page = new Page(pageNo, pageSize);
IPage<Queue> queueList = queueMapper.queryQueuePaging(page, searchVal);
Integer count = (int) queueList.getTotal();
PageInfo<Queue> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount(count);
pageInfo.setLists(queueList.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * create queue
@@ -109,45 +54,7 @@ public class QueueService extends BaseService {
 * @param queueName queue name
 * @return create result
 */
public Map<String, Object> createQueue(User loginUser, String queue, String queueName) { Map<String, Object> createQueue(User loginUser, String queue, String queueName);
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
if (StringUtils.isEmpty(queue)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queue");
return result;
}
if (StringUtils.isEmpty(queueName)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queueName");
return result;
}
if (checkQueueNameExist(queueName)) {
putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
return result;
}
if (checkQueueExist(queue)) {
putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
return result;
}
Queue queueObj = new Queue();
Date now = new Date();
queueObj.setQueue(queue);
queueObj.setQueueName(queueName);
queueObj.setCreateTime(now);
queueObj.setUpdateTime(now);
queueMapper.insert(queueObj);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * update queue
@@ -158,66 +65,7 @@ public class QueueService extends BaseService {
 * @param queueName queue name
 * @return update result code
 */
public Map<String, Object> updateQueue(User loginUser, int id, String queue, String queueName) { Map<String, Object> updateQueue(User loginUser, int id, String queue, String queueName);
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
if (StringUtils.isEmpty(queue)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queue");
return result;
}
if (StringUtils.isEmpty(queueName)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queueName");
return result;
}
Queue queueObj = queueMapper.selectById(id);
if (queueObj == null) {
putMsg(result, Status.QUEUE_NOT_EXIST, id);
return result;
}
// whether queue value or queueName is changed
if (queue.equals(queueObj.getQueue()) && queueName.equals(queueObj.getQueueName())) {
putMsg(result, Status.NEED_NOT_UPDATE_QUEUE);
return result;
}
// check queue name is exist
if (!queueName.equals(queueObj.getQueueName())
&& checkQueueNameExist(queueName)) {
putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
return result;
}
// check queue value is exist
if (!queue.equals(queueObj.getQueue()) && checkQueueExist(queue)) {
putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
return result;
}
// check old queue using by any user
if (checkIfQueueIsInUsing(queueObj.getQueueName(), queueName)) {
//update user related old queue
Integer relatedUserNums = userMapper.updateUserQueue(queueObj.getQueueName(), queueName);
logger.info("old queue have related {} user, exec update user success.", relatedUserNums);
}
// update queue
Date now = new Date();
queueObj.setQueue(queue);
queueObj.setQueueName(queueName);
queueObj.setUpdateTime(now);
queueMapper.updateById(queueObj);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * verify queue and queueName
@@ -226,69 +74,6 @@ public class QueueService extends BaseService {
 * @param queueName queue name
 * @return true if the queue name not exists, otherwise return false
 */
public Result verifyQueue(String queue, String queueName) { Result<Object> verifyQueue(String queue, String queueName);
Result result = new Result();
if (StringUtils.isEmpty(queue)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queue");
return result;
}
if (StringUtils.isEmpty(queueName)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "queueName");
return result;
}
if (checkQueueNameExist(queueName)) {
logger.error("queue name {} has exist, can't create again.", queueName);
putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
return result;
}
if (checkQueueExist(queue)) {
logger.error("queue value {} has exist, can't create again.", queue);
putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check queue exist
* if exists return true, not exists return false
* check queue exist
*
* @param queue queue
* @return true if the queue not exists, otherwise return false
*/
private boolean checkQueueExist(String queue) {
return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(queue, null));
}
/**
* check queue name exist
* if exists return true, not exists return false
*
* @param queueName queue name
* @return true if the queue name not exists, otherwise return false
*/
private boolean checkQueueNameExist(String queueName) {
return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(null, queueName));
}
/**
* check old queue name using by any user
* if need to update user
*
* @param oldQueue old queue name
* @param newQueue new queue name
* @return true if need to update user
*/
private boolean checkIfQueueIsInUsing (String oldQueue, String newQueue) {
return !oldQueue.equals(newQueue) && CollectionUtils.isNotEmpty(userMapper.queryUserListByQueue(oldQueue));
}
}
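The verifyQueue body removed above boils down to two blank-value checks followed by two duplicate checks against the queues already registered. A rough standalone sketch of that check order, with the queueMapper lookups replaced by hard-coded, purely hypothetical lists:

import java.util.Arrays;
import java.util.List;

public class QueueVerifySketch {
    // Stand-ins for queueMapper.queryAllQueueList(queue, queueName); illustrative data only.
    private static final List<String> EXISTING_QUEUE_VALUES = Arrays.asList("default", "spark");
    private static final List<String> EXISTING_QUEUE_NAMES = Arrays.asList("default", "spark-queue");

    static String verifyQueue(String queue, String queueName) {
        if (queue == null || queue.isEmpty()) {
            return "REQUEST_PARAMS_NOT_VALID_ERROR: queue";
        }
        if (queueName == null || queueName.isEmpty()) {
            return "REQUEST_PARAMS_NOT_VALID_ERROR: queueName";
        }
        if (EXISTING_QUEUE_NAMES.contains(queueName)) {
            return "QUEUE_NAME_EXIST: " + queueName;   // queue name already taken
        }
        if (EXISTING_QUEUE_VALUES.contains(queue)) {
            return "QUEUE_VALUE_EXIST: " + queue;      // queue value already taken
        }
        return "SUCCESS";
    }

    public static void main(String[] args) {
        System.out.println(verifyQueue("default", "new-queue"));   // QUEUE_VALUE_EXIST: default
        System.out.println(verifyQueue("flink", "flink-queue"));   // SUCCESS
    }
}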

1200
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java

File diff suppressed because it is too large

509
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java

@@ -17,77 +17,18 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.dto.ScheduleParam;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob;
import org.apache.dolphinscheduler.service.quartz.QuartzExecutors;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
 * scheduler service
 */
@Service public interface SchedulerService {
public class SchedulerService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(SchedulerService.class);
@Autowired
private ProjectService projectService;
@Autowired
private ExecutorService executorService;
@Autowired
private MonitorService monitorService;
@Autowired
private ProcessService processService;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
/**
 * save schedule
@@ -103,80 +44,14 @@ public class SchedulerService extends BaseService {
 * @param workerGroup worker group
 * @return create result code
 */
@Transactional(rollbackFor = RuntimeException.class) Map<String, Object> insertSchedule(User loginUser, String projectName,
public Map<String, Object> insertSchedule(User loginUser, String projectName, Integer processDefineId,
Integer processDefineId, String schedule,
String schedule, WarningType warningType,
WarningType warningType, int warningGroupId,
int warningGroupId, FailureStrategy failureStrategy,
FailureStrategy failureStrategy, Priority processInstancePriority,
Priority processInstancePriority, String workerGroup);
String workerGroup) {
Map<String, Object> result = new HashMap();
Project project = projectMapper.queryByName(projectName);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
// check work flow define release state
ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId);
result = executorService.checkProcessDefinitionValid(processDefinition, processDefineId);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
Schedule scheduleObj = new Schedule();
Date now = new Date();
scheduleObj.setProjectName(projectName);
scheduleObj.setProcessDefinitionId(processDefinition.getId());
scheduleObj.setProcessDefinitionName(processDefinition.getName());
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
logger.warn("The start time must not be the same as the end");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return result;
}
scheduleObj.setStartTime(scheduleParam.getStartTime());
scheduleObj.setEndTime(scheduleParam.getEndTime());
if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) {
logger.error(scheduleParam.getCrontab() + " verify failure");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab());
return result;
}
scheduleObj.setCrontab(scheduleParam.getCrontab());
scheduleObj.setWarningType(warningType);
scheduleObj.setWarningGroupId(warningGroupId);
scheduleObj.setFailureStrategy(failureStrategy);
scheduleObj.setCreateTime(now);
scheduleObj.setUpdateTime(now);
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setUserName(loginUser.getUserName());
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
scheduleObj.setProcessInstancePriority(processInstancePriority);
scheduleObj.setWorkerGroup(workerGroup);
scheduleMapper.insert(scheduleObj);
/**
* updateProcessInstance receivers and cc by process definition id
*/
processDefinition.setWarningGroupId(warningGroupId);
processDefinitionMapper.updateById(processDefinition);
// return scheduler object with ID
result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId()));
putMsg(result, Status.SUCCESS);
result.put("scheduleId", scheduleObj.getId());
return result;
}
/**
 * updateProcessInstance schedule
@@ -193,95 +68,16 @@ public class SchedulerService extends BaseService {
 * @param scheduleStatus schedule status
 * @return update result code
 */
@Transactional(rollbackFor = RuntimeException.class) Map<String, Object> updateSchedule(User loginUser,
public Map<String, Object> updateSchedule(User loginUser, String projectName,
String projectName, Integer id,
Integer id, String scheduleExpression,
String scheduleExpression, WarningType warningType,
WarningType warningType, int warningGroupId,
int warningGroupId, FailureStrategy failureStrategy,
FailureStrategy failureStrategy, ReleaseState scheduleStatus,
ReleaseState scheduleStatus, Priority processInstancePriority,
Priority processInstancePriority, String workerGroup);
String workerGroup) {
Map<String, Object> result = new HashMap<String, Object>(5);
Project project = projectMapper.queryByName(projectName);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
// check schedule exists
Schedule schedule = scheduleMapper.selectById(id);
if (schedule == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
return result;
}
ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId());
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId());
return result;
}
/**
* scheduling on-line status forbid modification
*/
if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) {
return result;
}
Date now = new Date();
// updateProcessInstance param
if (StringUtils.isNotEmpty(scheduleExpression)) {
ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class);
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
logger.warn("The start time must not be the same as the end");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return result;
}
schedule.setStartTime(scheduleParam.getStartTime());
schedule.setEndTime(scheduleParam.getEndTime());
if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) {
putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab());
return result;
}
schedule.setCrontab(scheduleParam.getCrontab());
}
if (warningType != null) {
schedule.setWarningType(warningType);
}
schedule.setWarningGroupId(warningGroupId);
if (failureStrategy != null) {
schedule.setFailureStrategy(failureStrategy);
}
if (scheduleStatus != null) {
schedule.setReleaseState(scheduleStatus);
}
schedule.setWorkerGroup(workerGroup);
schedule.setUpdateTime(now);
schedule.setProcessInstancePriority(processInstancePriority);
scheduleMapper.updateById(schedule);
/**
* updateProcessInstance recipients and cc by process definition ID
*/
processDefinition.setWarningGroupId(warningGroupId);
processDefinitionMapper.updateById(processDefinition);
putMsg(result, Status.SUCCESS);
return result;
}
/**
@@ -293,110 +89,10 @@ public class SchedulerService extends BaseService {
 * @param scheduleStatus schedule status
 * @return publish result code
 */
@Transactional(rollbackFor = RuntimeException.class) Map<String, Object> setScheduleState(User loginUser,
public Map<String, Object> setScheduleState(User loginUser, String projectName,
String projectName, Integer id,
Integer id, ReleaseState scheduleStatus);
ReleaseState scheduleStatus) {
Map<String, Object> result = new HashMap<String, Object>(5);
Project project = projectMapper.queryByName(projectName);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
// check schedule exists
Schedule scheduleObj = scheduleMapper.selectById(id);
if (scheduleObj == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
return result;
}
// check schedule release state
if (scheduleObj.getReleaseState() == scheduleStatus) {
logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}",
scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus);
putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus);
return result;
}
ProcessDefinition processDefinition = processService.findProcessDefineById(scheduleObj.getProcessDefinitionId());
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId());
return result;
}
if (scheduleStatus == ReleaseState.ONLINE) {
// check process definition release state
if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
logger.info("not release process definition id: {} , name : {}",
processDefinition.getId(), processDefinition.getName());
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName());
return result;
}
// check sub process definition release state
List<Integer> subProcessDefineIds = new ArrayList<>();
processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds);
Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]);
if (subProcessDefineIds.size() > 0) {
List<ProcessDefinition> subProcessDefinitionList =
processDefinitionMapper.queryDefinitionListByIdList(idArray);
if (subProcessDefinitionList != null && subProcessDefinitionList.size() > 0) {
for (ProcessDefinition subProcessDefinition : subProcessDefinitionList) {
/**
* if there is no online process, exit directly
*/
if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) {
logger.info("not release process definition id: {} , name : {}",
subProcessDefinition.getId(), subProcessDefinition.getName());
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId());
return result;
}
}
}
}
}
// check master server exists
List<Server> masterServers = monitorService.getServerListFromZK(true);
if (masterServers.size() == 0) {
putMsg(result, Status.MASTER_NOT_EXISTS);
return result;
}
// set status
scheduleObj.setReleaseState(scheduleStatus);
scheduleMapper.updateById(scheduleObj);
try {
switch (scheduleStatus) {
case ONLINE: {
logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers);
setSchedule(project.getId(), scheduleObj);
break;
}
case OFFLINE: {
logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers);
deleteSchedule(project.getId(), id);
break;
}
default: {
putMsg(result, Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString());
return result;
}
}
} catch (Exception e) {
result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? "set online failure" : "set offline failure");
throw new ServiceException(result.get(Constants.MSG).toString());
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * query schedule
@@ -409,36 +105,7 @@ public class SchedulerService extends BaseService {
 * @param searchVal search value
 * @return schedule list page
 */
public Map<String, Object> querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize) { Map<String, Object> querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize);
HashMap<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId);
return result;
}
Page<Schedule> page = new Page(pageNo, pageSize);
IPage<Schedule> scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging(
page, processDefineId, searchVal
);
PageInfo pageInfo = new PageInfo<Schedule>(pageNo, pageSize);
pageInfo.setTotalCount((int) scheduleIPage.getTotal());
pageInfo.setLists(scheduleIPage.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * query schedule list
@@ -447,41 +114,7 @@ public class SchedulerService extends BaseService {
 * @param projectName project name
 * @return schedule list
 */
public Map<String, Object> queryScheduleList(User loginUser, String projectName) { Map<String, Object> queryScheduleList(User loginUser, String projectName);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
List<Schedule> schedules = scheduleMapper.querySchedulerListByProjectName(projectName);
result.put(Constants.DATA_LIST, schedules);
putMsg(result, Status.SUCCESS);
return result;
}
public void setSchedule(int projectId, Schedule schedule) {
int scheduleId = schedule.getId();
logger.info("set schedule, project id: {}, scheduleId: {}", projectId, scheduleId);
Date startDate = schedule.getStartTime();
Date endDate = schedule.getEndTime();
String jobName = QuartzExecutors.buildJobName(scheduleId);
String jobGroupName = QuartzExecutors.buildJobGroupName(projectId);
Map<String, Object> dataMap = QuartzExecutors.buildDataMap(projectId, scheduleId, schedule);
QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, jobName, jobGroupName, startDate, endDate,
schedule.getCrontab(), dataMap);
}
/**
 * delete schedule
@@ -490,35 +123,7 @@ public class SchedulerService extends BaseService {
 * @param scheduleId schedule id
 * @throws RuntimeException runtime exception
 */
public static void deleteSchedule(int projectId, int scheduleId) { void deleteSchedule(int projectId, int scheduleId);
logger.info("delete schedules of project id:{}, schedule id:{}", projectId, scheduleId);
String jobName = QuartzExecutors.buildJobName(scheduleId);
String jobGroupName = QuartzExecutors.buildJobGroupName(projectId);
if (!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)) {
logger.warn("set offline failure:projectId:{},scheduleId:{}", projectId, scheduleId);
throw new ServiceException("set offline failure");
}
}
/**
* check valid
*
* @param result result
* @param bool bool
* @param status status
* @return check result code
*/
private boolean checkValid(Map<String, Object> result, boolean bool, Status status) {
// timeout is valid
if (bool) {
putMsg(result, status);
return true;
}
return false;
}
/**
 * delete schedule by id
@@ -528,46 +133,7 @@ public class SchedulerService extends BaseService {
 * @param scheduleId scheule id
 * @return delete result code
 */
public Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId) { Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
Schedule schedule = scheduleMapper.selectById(scheduleId);
if (schedule == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId);
return result;
}
// Determine if the login user is the owner of the schedule
if (loginUser.getId() != schedule.getUserId()
&& loginUser.getUserType() != UserType.ADMIN_USER) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
// check schedule is already online
if (schedule.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId());
return result;
}
int delete = scheduleMapper.deleteById(scheduleId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
}
return result;
}
/**
 * preview schedule
@@ -577,24 +143,5 @@ public class SchedulerService extends BaseService {
 * @param schedule schedule expression
 * @return the next five fire time
 */
public Map<String, Object> previewSchedule(User loginUser, String projectName, String schedule) { Map<String, Object> previewSchedule(User loginUser, String projectName, String schedule);
Map<String, Object> result = new HashMap<>();
CronExpression cronExpression;
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
Date now = new Date();
Date startTime = now.after(scheduleParam.getStartTime()) ? now : scheduleParam.getStartTime();
Date endTime = scheduleParam.getEndTime();
try {
cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab());
} catch (ParseException e) {
logger.error(e.getMessage(), e);
putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
return result;
}
List<Date> selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime, cronExpression, Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT);
result.put(Constants.DATA_LIST, selfFireDateList.stream().map(t -> DateUtils.dateToString(t)));
putMsg(result, Status.SUCCESS);
return result;
}
}
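The removed previewSchedule body computes the next five fire times of a crontab between a start and an end date via CronUtils; the same preview can be sketched directly against Quartz's org.quartz.CronExpression. A small, self-contained approximation (the helper and class names and the five-times limit mirror the removed code, everything else is illustrative):

import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.quartz.CronExpression;

public class SchedulePreviewSketch {
    // Returns up to `count` fire times of `cron` strictly after `start` and not after `end`.
    static List<Date> previewFireTimes(String cron, Date start, Date end, int count) throws ParseException {
        CronExpression expression = new CronExpression(cron);
        List<Date> fireTimes = new ArrayList<>();
        Date next = expression.getNextValidTimeAfter(start);
        while (next != null && !next.after(end) && fireTimes.size() < count) {
            fireTimes.add(next);
            next = expression.getNextValidTimeAfter(next);
        }
        return fireTimes;
    }

    public static void main(String[] args) throws ParseException {
        Date now = new Date();
        Date oneDayLater = new Date(now.getTime() + 24L * 60 * 60 * 1000);
        // Quartz cron "0 0 * * * ?" fires at the top of every hour.
        previewFireTimes("0 0 * * * ?", now, oneDayLater, 5).forEach(System.out::println);
    }
}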

5
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java

@@ -14,13 +14,14 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;
import javax.servlet.http.HttpServletRequest; package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.dao.entity.Session;
import org.apache.dolphinscheduler.dao.entity.User;
import javax.servlet.http.HttpServletRequest;
/**
 * session service
 */

154
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java

@@ -17,57 +17,15 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.MessageFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
 * task instance service
 */
@Service public interface TaskInstanceService {
public class TaskInstanceService extends BaseService {
@Autowired
ProjectMapper projectMapper;
@Autowired
ProjectService projectService;
@Autowired
ProcessService processService;
@Autowired
TaskInstanceMapper taskInstanceMapper;
@Autowired
ProcessInstanceService processInstanceService;
@Autowired
UsersService usersService;
/**
 * query task list by project, process instance, task name, task start time, task end time, task status, keyword paging
@@ -85,65 +43,10 @@ public class TaskInstanceService extends BaseService {
 * @param pageSize page size
 * @return task list page
 */
public Map<String, Object> queryTaskListPaging(User loginUser, String projectName, Map<String, Object> queryTaskListPaging(User loginUser, String projectName,
Integer processInstanceId, String processInstanceName, String taskName, String executorName, String startDate, Integer processInstanceId, String processInstanceName, String taskName, String executorName, String startDate,
String endDate, String searchVal, ExecutionStatus stateType, String host, String endDate, String searchVal, ExecutionStatus stateType, String host,
Integer pageNo, Integer pageSize) { Integer pageNo, Integer pageSize);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
int[] statusArray = null;
if (stateType != null) {
statusArray = new int[]{stateType.ordinal()};
}
Date start = null;
Date end = null;
if (StringUtils.isNotEmpty(startDate)) {
start = DateUtils.getScheduleDate(startDate);
if (start == null) {
return generateInvalidParamRes(result, "startDate");
}
}
if (StringUtils.isNotEmpty(endDate)) {
end = DateUtils.getScheduleDate(endDate);
if (end == null) {
return generateInvalidParamRes(result, "endDate");
}
}
Page<TaskInstance> page = new Page(pageNo, pageSize);
PageInfo pageInfo = new PageInfo<TaskInstance>(pageNo, pageSize);
int executorId = usersService.getUserIdByName(executorName);
IPage<TaskInstance> taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging(
page, project.getId(), processInstanceId, processInstanceName, searchVal, taskName, executorId, statusArray, host, start, end
);
Set<String> exclusionSet = new HashSet<>();
exclusionSet.add(Constants.CLASS);
exclusionSet.add("taskJson");
List<TaskInstance> taskInstanceList = taskInstanceIPage.getRecords();
for (TaskInstance taskInstance : taskInstanceList) {
taskInstance.setDuration(DateUtils.format2Duration(taskInstance.getStartTime(), taskInstance.getEndTime()));
User executor = usersService.queryUser(taskInstance.getExecutorId());
if (null != executor) {
taskInstance.setExecutorName(executor.getUserName());
}
}
pageInfo.setTotalCount((int) taskInstanceIPage.getTotal());
pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(), exclusionSet));
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* change one task instance's state from failure to forced success
@ -153,51 +56,6 @@ public class TaskInstanceService extends BaseService {
* @param taskInstanceId task instance id
* @return the result code and msg
*/
public Map<String, Object> forceTaskSuccess(User loginUser, String projectName, Integer taskInstanceId) { Map<String, Object> forceTaskSuccess(User loginUser, String projectName, Integer taskInstanceId);
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
// check user auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
// check whether the task instance can be found
TaskInstance task = taskInstanceMapper.selectById(taskInstanceId);
if (task == null) {
putMsg(result, Status.TASK_INSTANCE_NOT_FOUND);
return result;
}
// check whether the task instance state type is failure
if (!task.getState().typeIsFailure()) {
putMsg(result, Status.TASK_INSTANCE_STATE_OPERATION_ERROR, taskInstanceId, task.getState().toString());
return result;
}
// change the state of the task instance
task.setState(ExecutionStatus.FORCED_SUCCESS);
int changedNum = taskInstanceMapper.updateById(task);
if (changedNum > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.FORCE_TASK_SUCCESS_ERROR);
}
return result;
}
/***
* generate {@link org.apache.dolphinscheduler.api.enums.Status#REQUEST_PARAMS_NOT_VALID_ERROR} res with param name
* @param result exist result map
* @param params invalid params name
* @return update result map
*/
private Map<String, Object> generateInvalidParamRes(Map<String, Object> result, String params) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), params));
return result;
}
}
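The net effect of this hunk is that TaskInstanceService keeps only the two signatures above and loses its Spring wiring. A minimal sketch of where the removed bodies would go, assuming the implementation-class pattern used elsewhere in this merge (the class name TaskInstanceServiceImpl and its BaseServiceImpl parent are assumptions, not shown in this hunk):

package org.apache.dolphinscheduler.api.service.impl;

import java.util.Map;

import org.apache.dolphinscheduler.api.service.TaskInstanceService;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.dao.entity.User;
import org.springframework.stereotype.Service;

@Service
public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInstanceService {

    @Override
    public Map<String, Object> queryTaskListPaging(User loginUser, String projectName,
            Integer processInstanceId, String processInstanceName, String taskName, String executorName,
            String startDate, String endDate, String searchVal, ExecutionStatus stateType, String host,
            Integer pageNo, Integer pageSize) {
        // the body removed above (project auth check, date parsing, paged mapper query) would move here unchanged
        throw new UnsupportedOperationException("sketch only");
    }

    @Override
    public Map<String, Object> forceTaskSuccess(User loginUser, String projectName, Integer taskInstanceId) {
        // the removed force-success body (failure-state check, update to FORCED_SUCCESS) would move here unchanged
        throw new UnsupportedOperationException("sketch only");
    }
}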

48
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java

@ -14,26 +14,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status; package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.TaskRecordDao;
import org.apache.dolphinscheduler.dao.entity.TaskRecord;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.dolphinscheduler.common.Constants.*;
/**
* task record service
*/
@Service public interface TaskRecordService {
public class TaskRecordService extends BaseService{
/**
* query task record list paging
@ -50,33 +39,8 @@ public class TaskRecordService extends BaseService{
* @param isHistory is history
* @return task record list
*/
public Map<String,Object> queryTaskRecordListPaging(boolean isHistory, String taskName, String startDate, Map<String,Object> queryTaskRecordListPaging(boolean isHistory, String taskName, String startDate,
String taskDate, String sourceTable, String taskDate, String sourceTable,
String destTable, String endDate, String destTable, String endDate,
String state, Integer pageNo, Integer pageSize) { String state, Integer pageNo, Integer pageSize);
Map<String, Object> result = new HashMap<>(10);
PageInfo pageInfo = new PageInfo<TaskRecord>(pageNo, pageSize);
Map<String, String> map = new HashMap<>(10);
map.put("taskName", taskName);
map.put("taskDate", taskDate);
map.put("state", state);
map.put("sourceTable", sourceTable);
map.put("targetTable", destTable);
map.put("startTime", startDate);
map.put("endTime", endDate);
map.put("offset", pageInfo.getStart().toString());
map.put("pageSize", pageInfo.getPageSize().toString());
String table = isHistory ? TASK_RECORD_TABLE_HISTORY_HIVE_LOG : TASK_RECORD_TABLE_HIVE_LOG;
int count = TaskRecordDao.countTaskRecord(map, table);
List<TaskRecord> recordList = TaskRecordDao.queryAllTaskRecord(map, table);
pageInfo.setTotalCount(count);
pageInfo.setLists(recordList);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
}
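TaskRecordService likewise keeps only the query signature; the removed body builds the filter map handed to TaskRecordDao. A hedged caller sketch against the new interface (the wrapper class and the choice of arguments are illustrative only):

import java.util.Map;

import org.apache.dolphinscheduler.api.service.TaskRecordService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class TaskRecordHistoryExample {

    @Autowired
    private TaskRecordService taskRecordService;

    /** Query the history Hive log table (isHistory = true) with no filters, first page of 10 records. */
    public Map<String, Object> listHistory() {
        return taskRecordService.queryTaskRecordListPaging(
                true, null, null, null, null, null, null, null, 1, 10);
    }
}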

261
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java

@ -14,51 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* udf function service * udf func service
*/
@Service public interface UdfFuncService {
public class UdfFuncService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(UdfFuncService.class);
@Autowired
private ResourceMapper resourceMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private UDFUserMapper udfUserMapper;
/**
* create udf function
@ -73,72 +41,14 @@ public class UdfFuncService extends BaseService{
* @param className class name
* @return create result code
*/
public Result createUdfFunction(User loginUser, Result<Object> createUdfFunction(User loginUser,
String funcName, String funcName,
String className, String className,
String argTypes, String argTypes,
String database, String database,
String desc, String desc,
UdfType type, UdfType type,
int resourceId) { int resourceId);
Result result = new Result();
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()){
logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
putMsg(result, Status.HDFS_NOT_STARTUP);
return result;
}
// verify udf func name exist
if (checkUdfFuncNameExists(funcName)) {
logger.error("udf func {} has exist, can't recreate", funcName);
putMsg(result, Status.UDF_FUNCTION_EXISTS);
return result;
}
Resource resource = resourceMapper.selectById(resourceId);
if (resource == null) {
logger.error("resourceId {} is not exist", resourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
//save data
UdfFunc udf = new UdfFunc();
Date now = new Date();
udf.setUserId(loginUser.getId());
udf.setFuncName(funcName);
udf.setClassName(className);
if (StringUtils.isNotEmpty(argTypes)) {
udf.setArgTypes(argTypes);
}
if (StringUtils.isNotEmpty(database)) {
udf.setDatabase(database);
}
udf.setDescription(desc);
udf.setResourceId(resourceId);
udf.setResourceName(resource.getFullName());
udf.setType(type);
udf.setCreateTime(now);
udf.setUpdateTime(now);
udfFuncMapper.insert(udf);
putMsg(result, Status.SUCCESS);
return result;
}
/**
*
* @param name name
* @return check result code
*/
private boolean checkUdfFuncNameExists(String name){
List<UdfFunc> resource = udfFuncMapper.queryUdfByIdStr(null, name);
return resource != null && resource.size() > 0;
}
/**
* query udf function
@ -146,18 +56,7 @@ public class UdfFuncService extends BaseService{
* @param id udf function id
* @return udf function detail
*/
public Map<String, Object> queryUdfFuncDetail(int id) { Map<String, Object> queryUdfFuncDetail(int id);
Map<String, Object> result = new HashMap<>(5);
UdfFunc udfFunc = udfFuncMapper.selectById(id);
if (udfFunc == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
result.put(Constants.DATA_LIST, udfFunc);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update udf function
@ -172,67 +71,14 @@ public class UdfFuncService extends BaseService{
* @param className class name
* @return update result code
*/
public Map<String, Object> updateUdfFunc(int udfFuncId, Map<String, Object> updateUdfFunc(int udfFuncId,
String funcName, String funcName,
String className, String className,
String argTypes, String argTypes,
String database, String database,
String desc, String desc,
UdfType type, UdfType type,
int resourceId) { int resourceId);
Map<String, Object> result = new HashMap<>();
// verify udfFunc is exist
UdfFunc udf = udfFuncMapper.selectUdfById(udfFuncId);
if (udf == null) {
result.put(Constants.STATUS, Status.UDF_FUNCTION_NOT_EXIST);
result.put(Constants.MSG, Status.UDF_FUNCTION_NOT_EXIST.getMsg());
return result;
}
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()){
logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
putMsg(result, Status.HDFS_NOT_STARTUP);
return result;
}
// verify udfFuncName is exist
if (!funcName.equals(udf.getFuncName())) {
if (checkUdfFuncNameExists(funcName)) {
logger.error("UdfFunc {} has exist, can't create again.", funcName);
result.put(Constants.STATUS, Status.UDF_FUNCTION_EXISTS);
result.put(Constants.MSG, Status.UDF_FUNCTION_EXISTS.getMsg());
return result;
}
}
Resource resource = resourceMapper.selectById(resourceId);
if (resource == null) {
logger.error("resourceId {} is not exist", resourceId);
result.put(Constants.STATUS, Status.RESOURCE_NOT_EXIST);
result.put(Constants.MSG, Status.RESOURCE_NOT_EXIST.getMsg());
return result;
}
Date now = new Date();
udf.setFuncName(funcName);
udf.setClassName(className);
udf.setArgTypes(argTypes);
if (StringUtils.isNotEmpty(database)) {
udf.setDatabase(database);
}
udf.setDescription(desc);
udf.setResourceId(resourceId);
udf.setResourceName(resource.getFullName());
udf.setType(type);
udf.setUpdateTime(now);
udfFuncMapper.updateById(udf);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query udf function list paging
@ -243,37 +89,7 @@ public class UdfFuncService extends BaseService{
* @param searchVal search value
* @return udf function list page
*/
public Map<String, Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { Map<String, Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize);
Map<String, Object> result = new HashMap<>(5);
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
IPage<UdfFunc> udfFuncList = getUdfFuncsPage(loginUser, searchVal, pageSize, pageNo);
pageInfo.setTotalCount((int)udfFuncList.getTotal());
pageInfo.setLists(udfFuncList.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get udf functions
*
* @param loginUser login user
* @param searchVal search value
* @param pageSize page size
* @param pageNo page number
* @return udf function list page
*/
private IPage<UdfFunc> getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) {
int userId = loginUser.getId();
if (isAdmin(loginUser)) {
userId = 0;
}
Page<UdfFunc> page = new Page(pageNo, pageSize);
return udfFuncMapper.queryUdfFuncPaging(page, userId, searchVal);
}
/**
* query udf list
@ -282,18 +98,7 @@ public class UdfFuncService extends BaseService{
* @param type udf type
* @return udf func list
*/
public Map<String, Object> queryUdfFuncList(User loginUser, Integer type) { Map<String, Object> queryUdfFuncList(User loginUser, Integer type);
Map<String, Object> result = new HashMap<>(5);
int userId = loginUser.getId();
if (isAdmin(loginUser)) {
userId = 0;
}
List<UdfFunc> udfFuncList = udfFuncMapper.getUdfFuncByType(userId, type);
result.put(Constants.DATA_LIST, udfFuncList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete udf function
@ -301,15 +106,7 @@ public class UdfFuncService extends BaseService{
* @param id udf function id
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class) Result<Object> delete(int id);
public Result delete(int id) {
Result result = new Result();
udfFuncMapper.deleteById(id);
udfUserMapper.deleteByUdfFuncId(id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify udf function by name
@ -317,16 +114,6 @@ public class UdfFuncService extends BaseService{
* @param name name
* @return true if the name can be used, otherwise return false
*/
public Result verifyUdfFuncByName(String name) { Result<Object> verifyUdfFuncByName(String name);
Result result = new Result();
if (checkUdfFuncNameExists(name)) {
logger.error("UDF function name:{} has exist, can't create again.", name);
putMsg(result, Status.UDF_FUNCTION_EXISTS);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
}
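A hedged caller sketch against the new UdfFuncService interface, following the verify-then-create flow implied by the removed bodies (the wrapper class, the UDF name, the class name and the comparison against Status.SUCCESS.getCode() are illustrative only):

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.UdfFuncService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.dao.entity.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class UdfFuncUsageExample {

    @Autowired
    private UdfFuncService udfFuncService;

    /** Verify the function name is free, then register a Hive UDF backed by an already-uploaded resource. */
    public Result<Object> registerToUpper(User loginUser, int resourceId) {
        Result<Object> check = udfFuncService.verifyUdfFuncByName("to_upper");
        if (check.getCode() != Status.SUCCESS.getCode()) {
            return check;
        }
        return udfFuncService.createUdfFunction(loginUser, "to_upper", "com.example.udf.ToUpper",
                "string", "default", "upper-cases a string", UdfType.HIVE, resourceId);
    }
}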

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UiPluginService.java

@ -22,7 +22,7 @@ import org.apache.dolphinscheduler.common.enums.PluginType;
import java.util.Map;
/**
* UiPluginService * ui plugin service
*/
public interface UiPluginService {

875
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java

File diff suppressed because it is too large

83
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageService.java

@ -14,84 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;
import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
@Service package org.apache.dolphinscheduler.api.service;
public class WorkFlowLineageService extends BaseService {
@Autowired
private WorkFlowLineageMapper workFlowLineageMapper;
public Map<String, Object> queryWorkFlowLineageByName(String workFlowName, int projectId) { import java.util.Map;
Map<String, Object> result = new HashMap<>(5); import java.util.Set;
List<WorkFlowLineage> workFlowLineageList = workFlowLineageMapper.queryByName(workFlowName, projectId);
result.put(Constants.DATA_LIST, workFlowLineageList);
putMsg(result, Status.SUCCESS);
return result;
}
private List<WorkFlowRelation> getWorkFlowRelationRecursion(Set<Integer> ids, List<WorkFlowRelation> workFlowRelations,Set<Integer> sourceIds) { /**
for(int id : ids) { * work flow lineage service
sourceIds.addAll(ids); */
List<WorkFlowRelation> workFlowRelationsTmp = workFlowLineageMapper.querySourceTarget(id); public interface WorkFlowLineageService {
if(workFlowRelationsTmp != null && !workFlowRelationsTmp.isEmpty()) {
Set<Integer> idsTmp = new HashSet<>();
for(WorkFlowRelation workFlowRelation:workFlowRelationsTmp) {
if(!sourceIds.contains(workFlowRelation.getTargetWorkFlowId())){
idsTmp.add(workFlowRelation.getTargetWorkFlowId());
}
}
workFlowRelations.addAll(workFlowRelationsTmp);
getWorkFlowRelationRecursion(idsTmp, workFlowRelations,sourceIds);
}
}
return workFlowRelations;
}
public Map<String, Object> queryWorkFlowLineageByIds(Set<Integer> ids,int projectId) { Map<String, Object> queryWorkFlowLineageByName(String workFlowName, int projectId);
Map<String, Object> result = new HashMap<>(5);
List<WorkFlowLineage> workFlowLineageList = workFlowLineageMapper.queryByIds(ids, projectId);
Map<String, Object> workFlowLists = new HashMap<>(5);
Set<Integer> idsV = new HashSet<>();
if(ids == null || ids.isEmpty()){
for(WorkFlowLineage workFlowLineage:workFlowLineageList) {
idsV.add(workFlowLineage.getWorkFlowId());
}
} else {
idsV = ids;
}
List<WorkFlowRelation> workFlowRelations = new ArrayList<>();
Set<Integer> sourceIds = new HashSet<>();
getWorkFlowRelationRecursion(idsV, workFlowRelations, sourceIds);
Set<Integer> idSet = new HashSet<>(); Map<String, Object> queryWorkFlowLineageByIds(Set<Integer> ids,int projectId);
//If the incoming parameter is not empty, you need to add downstream workflow detail attributes
if(ids != null && !ids.isEmpty()) {
for(WorkFlowRelation workFlowRelation : workFlowRelations) {
idSet.add(workFlowRelation.getTargetWorkFlowId());
}
for(int id : ids){
idSet.remove(id);
}
if(!idSet.isEmpty()) {
workFlowLineageList.addAll(workFlowLineageMapper.queryByIds(idSet, projectId));
}
}
workFlowLists.put("workFlowList",workFlowLineageList);
workFlowLists.put("workFlowRelationList",workFlowRelations);
result.put(Constants.DATA_LIST, workFlowLists);
putMsg(result, Status.SUCCESS);
return result;
}
}
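The removed getWorkFlowRelationRecursion is the heart of this class: it walks the workflow dependency graph outward from a set of ids, collecting every relation and using sourceIds as a visited set so cycles terminate. An equivalent iterative sketch, with a relationsOf lookup standing in for workFlowLineageMapper.querySourceTarget (assumed here to never return null):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.IntFunction;

import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation;

public class LineageWalkSketch {

    /** Collect all downstream relations reachable from startIds, visiting each workflow id at most once. */
    public static List<WorkFlowRelation> collectDownstream(Set<Integer> startIds,
                                                           IntFunction<List<WorkFlowRelation>> relationsOf) {
        List<WorkFlowRelation> edges = new ArrayList<>();
        Set<Integer> visited = new HashSet<>(startIds);
        Deque<Integer> queue = new ArrayDeque<>(startIds);
        while (!queue.isEmpty()) {
            int id = queue.poll();
            for (WorkFlowRelation relation : relationsOf.apply(id)) {
                edges.add(relation);
                if (visited.add(relation.getTargetWorkFlowId())) {
                    queue.add(relation.getTargetWorkFlowId());
                }
            }
        }
        return edges;
    }
}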

140
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java

@ -17,41 +17,14 @@
package org.apache.dolphinscheduler.api.service;
import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* work group service * worker group service
*/
@Service public interface WorkerGroupService {
public class WorkerGroupService extends BaseService {
private static final String NO_NODE_EXCEPTION_REGEX = "KeeperException$NoNodeException";
@Autowired
protected ZookeeperCachedOperator zookeeperCachedOperator;
@Autowired
ProcessInstanceMapper processInstanceMapper;
/**
* query worker group paging
@ -62,118 +35,13 @@ public class WorkerGroupService extends BaseService {
* @param pageSize page size
* @return worker group list page
*/
public Map<String, Object> queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal) { Map<String, Object> queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal);
// list from index
Integer fromIndex = (pageNo - 1) * pageSize;
// list to index
Integer toIndex = (pageNo - 1) * pageSize + pageSize;
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
List<WorkerGroup> workerGroups = getWorkerGroups(true);
List<WorkerGroup> resultDataList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(workerGroups)) {
List<WorkerGroup> searchValDataList = new ArrayList<>();
if (StringUtils.isNotEmpty(searchVal)) {
for (WorkerGroup workerGroup : workerGroups) {
if (workerGroup.getName().contains(searchVal)) {
searchValDataList.add(workerGroup);
}
}
} else {
searchValDataList = workerGroups;
}
if (searchValDataList.size() < pageSize) {
toIndex = (pageNo - 1) * pageSize + searchValDataList.size();
}
resultDataList = searchValDataList.subList(fromIndex, toIndex);
}
PageInfo<WorkerGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount(resultDataList.size());
pageInfo.setLists(resultDataList);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query all worker group
*
* @return all worker group list
*/
public Map<String, Object> queryAllGroup() { Map<String, Object> queryAllGroup();
Map<String, Object> result = new HashMap<>();
List<WorkerGroup> workerGroups = getWorkerGroups(false);
Set<String> availableWorkerGroupSet = workerGroups.stream()
.map(workerGroup -> workerGroup.getName())
.collect(Collectors.toSet());
result.put(Constants.DATA_LIST, availableWorkerGroupSet);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get worker groups
*
* @param isPaging whether paging
* @return WorkerGroup list
*/
private List<WorkerGroup> getWorkerGroups(boolean isPaging) {
String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS;
List<WorkerGroup> workerGroups = new ArrayList<>();
List<String> workerGroupList;
try {
workerGroupList = zookeeperCachedOperator.getChildrenKeys(workerPath);
} catch (Exception e) {
if (e.getMessage().contains(NO_NODE_EXCEPTION_REGEX)) {
if (isPaging) {
return workerGroups;
} else {
//ignore noNodeException return Default
WorkerGroup wg = new WorkerGroup();
wg.setName(DEFAULT_WORKER_GROUP);
workerGroups.add(wg);
return workerGroups;
}
} else {
throw e;
}
}
for (String workerGroup : workerGroupList) {
String workerGroupPath = workerPath + "/" + workerGroup;
List<String> childrenNodes = zookeeperCachedOperator.getChildrenKeys(workerGroupPath);
String timeStamp = "";
for (int i = 0; i < childrenNodes.size(); i++) {
String ip = childrenNodes.get(i);
childrenNodes.set(i, ip.substring(0, ip.lastIndexOf(":")));
timeStamp = ip.substring(ip.lastIndexOf(":"));
}
if (CollectionUtils.isNotEmpty(childrenNodes)) {
WorkerGroup wg = new WorkerGroup();
wg.setName(workerGroup);
if (isPaging) {
wg.setIpList(childrenNodes);
String registeredIpValue = zookeeperCachedOperator.get(workerGroupPath + "/" + childrenNodes.get(0) + timeStamp);
wg.setCreateTime(DateUtils.stringToDate(registeredIpValue.split(",")[6]));
wg.setUpdateTime(DateUtils.stringToDate(registeredIpValue.split(",")[7]));
}
workerGroups.add(wg);
}
}
return workerGroups;
}
}
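Worker groups come from ZooKeeper rather than a database table, so the removed queryAllGroupPaging pages the filtered list in memory via fromIndex/toIndex and subList. A small sketch of that slice logic; the clamp on fromIndex is an addition here (the removed code only shortened toIndex when the last page was partial):

import java.util.Collections;
import java.util.List;

public final class InMemoryPaging {

    /** Return the 1-based page of the given size from an already-filtered list. */
    public static <T> List<T> page(List<T> items, int pageNo, int pageSize) {
        int fromIndex = (pageNo - 1) * pageSize;
        if (fromIndex >= items.size()) {
            return Collections.emptyList();     // out-of-range page: empty result instead of an exception
        }
        int toIndex = Math.min(fromIndex + pageSize, items.size());
        return items.subList(fromIndex, toIndex);
    }
}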

14
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java

@ -14,11 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.AccessTokenService;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
@ -44,7 +44,7 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* access token service impl
*/
@Service
public class AccessTokenServiceImpl extends BaseService implements AccessTokenService { public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTokenService {
private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceImpl.class);
@ -61,7 +61,7 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
* @return token list for page number and page size
*/
public Map<String, Object> queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
PageInfo<AccessToken> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<AccessToken> page = new Page<>(pageNo, pageSize);
@ -87,7 +87,7 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
* @return create result code
*/
public Map<String, Object> createToken(User loginUser, int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
if (!hasPerm(loginUser,userId)){
putMsg(result, Status.USER_NO_OPERATION_PERM);
@ -124,7 +124,7 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
* @return token string
*/
public Map<String, Object> generateToken(User loginUser, int userId, String expireTime) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
if (!hasPerm(loginUser,userId)){
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
@ -143,7 +143,7 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
* @return delete result code
*/
public Map<String, Object> delAccessTokenById(User loginUser, int id) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
AccessToken accessToken = accessTokenMapper.selectById(id);
@ -174,7 +174,7 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe
* @return update result code
*/
public Map<String, Object> updateToken(User loginUser, int id, int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
if (!hasPerm(loginUser,userId)){
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;

207
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java

@ -0,0 +1,207 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.AlertGroupService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.AlertGroup;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* alert group service impl
*/
@Service
public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroupService {
@Autowired
private AlertGroupMapper alertGroupMapper;
/**
* query alert group list
*
* @return alert group list
*/
public Map<String, Object> queryAlertgroup() {
HashMap<String, Object> result = new HashMap<>();
List<AlertGroup> alertGroups = alertGroupMapper.queryAllGroupList();
result.put(Constants.DATA_LIST, alertGroups);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* paging query alarm group list
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return alert group list page
*/
public Map<String, Object> listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
Page<AlertGroup> page = new Page<>(pageNo, pageSize);
IPage<AlertGroup> alertGroupIPage = alertGroupMapper.queryAlertGroupPage(
page, searchVal);
PageInfo<AlertGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int) alertGroupIPage.getTotal());
pageInfo.setLists(alertGroupIPage.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* create alert group
*
* @param loginUser login user
* @param groupName group name
* @param desc description
* @param alertInstanceIds alertInstanceIds
* @return create result code
*/
public Map<String, Object> createAlertgroup(User loginUser, String groupName, String desc, String alertInstanceIds) {
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (isNotAdmin(loginUser, result)) {
return result;
}
AlertGroup alertGroup = new AlertGroup();
Date now = new Date();
alertGroup.setGroupName(groupName);
alertGroup.setAlertInstanceIds(alertInstanceIds);
alertGroup.setDescription(desc);
alertGroup.setCreateTime(now);
alertGroup.setUpdateTime(now);
alertGroup.setCreateUserId(loginUser.getId());
// insert
int insert = alertGroupMapper.insert(alertGroup);
if (insert > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_ALERT_GROUP_ERROR);
}
return result;
}
/**
* update alert group
*
* @param loginUser login user
* @param id alert group id
* @param groupName group name
* @param desc description
* @param alertInstanceIds alertInstanceIds
* @return update result code
*/
public Map<String, Object> updateAlertgroup(User loginUser, int id, String groupName, String desc, String alertInstanceIds) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
AlertGroup alertGroup = alertGroupMapper.selectById(id);
if (alertGroup == null) {
putMsg(result, Status.ALERT_GROUP_NOT_EXIST);
return result;
}
Date now = new Date();
if (StringUtils.isNotEmpty(groupName)) {
alertGroup.setGroupName(groupName);
}
alertGroup.setDescription(desc);
alertGroup.setUpdateTime(now);
alertGroup.setCreateUserId(loginUser.getId());
alertGroup.setAlertInstanceIds(alertInstanceIds);
alertGroupMapper.updateById(alertGroup);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete alert group by id
*
* @param loginUser login user
* @param id alert group id
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> delAlertgroupById(User loginUser, int id) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
if (isNotAdmin(loginUser, result)) {
return result;
}
//check exist
AlertGroup alertGroup = alertGroupMapper.selectById(id);
if (alertGroup == null) {
putMsg(result, Status.ALERT_GROUP_NOT_EXIST);
return result;
}
alertGroupMapper.deleteById(id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify group name exists
*
* @param groupName group name
* @return check result code
*/
public boolean existGroupName(String groupName) {
List<AlertGroup> alertGroup = alertGroupMapper.queryByGroupName(groupName);
return CollectionUtils.isNotEmpty(alertGroup);
}
}
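A hedged caller sketch for the new AlertGroupServiceImpl: check the group name via existGroupName, then create the group. It assumes existGroupName and createAlertgroup are exposed on the AlertGroupService interface; the group name, description and the comma-separated alert plugin instance ids "1,2" are made up for illustration:

import java.util.Map;

import org.apache.dolphinscheduler.api.service.AlertGroupService;
import org.apache.dolphinscheduler.dao.entity.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class AlertGroupUsageExample {

    @Autowired
    private AlertGroupService alertGroupService;

    public Map<String, Object> createOpsGroup(User loginUser) {
        if (alertGroupService.existGroupName("ops-alerts")) {
            throw new IllegalArgumentException("alert group name already in use");
        }
        // description plus the comma-separated ids of the alert plugin instances that should be notified
        return alertGroupService.createAlertgroup(loginUser, "ops-alerts", "alerts for the ops team", "1,2");
    }
}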

23
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java

@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.AlertPluginInstanceService;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.vo.AlertPluginInstanceVO;
import org.apache.dolphinscheduler.common.Constants;
@ -32,9 +31,6 @@ import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;
import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.PluginDefineMapper;
import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;
import org.apache.commons.collections4.MapUtils;
import java.util.ArrayList;
import java.util.Arrays;
@ -57,7 +53,7 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
*/
@Service
@Lazy
public class AlertPluginInstanceServiceImpl extends BaseService implements AlertPluginInstanceService { public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements AlertPluginInstanceService {
@Autowired
private AlertPluginInstanceMapper alertPluginInstanceMapper;
@ -250,26 +246,15 @@ public class AlertPluginInstanceServiceImpl extends BaseService implements Alert
}
/**
* parseToPluginUiParams * parse To Plugin Ui Params
*
* @param pluginParamsMapString k-v data
* @param pluginUiParams Complete parameters(include ui)
* @return Complete parameters list(include ui)
*/
private String parseToPluginUiParams(String pluginParamsMapString, String pluginUiParams) {
Map<String, String> paramsMap = JSONUtils.toMap(pluginParamsMapString); List<Map<String, Object>> pluginParamsList = PluginParamsTransfer.generatePluginParams(pluginParamsMapString, pluginUiParams);
if (MapUtils.isEmpty(paramsMap)) { return JSONUtils.toJsonString(pluginParamsList);
return null;
}
List<PluginParams> pluginParamsList = JSONUtils.toList(pluginUiParams, PluginParams.class);
List<PluginParams> newPluginParamsList = new ArrayList<>(pluginParamsList.size());
pluginParamsList.forEach(pluginParams -> {
pluginParams.setValue(paramsMap.get(pluginParams.getName()));
newPluginParamsList.add(pluginParams);
});
return JSONUtils.toJsonString(newPluginParamsList);
}
private boolean checkHasAssociatedAlertGroup(String id) {
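The rewritten parseToPluginUiParams hands the merge to PluginParamsTransfer.generatePluginParams; the removed lines show what that merge amounts to. A standalone sketch of the old behaviour, reconstructed only from the calls visible above (import paths assumed):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;

public class PluginUiParamsMergeSketch {

    /** Copy each stored value onto the ui param with the same name and re-serialise the list. */
    public static String merge(String storedValuesJson, String pluginUiParamsJson) {
        Map<String, String> storedValues = JSONUtils.toMap(storedValuesJson);
        if (storedValues == null || storedValues.isEmpty()) {
            return null;                                         // nothing stored yet, as in the removed code
        }
        List<PluginParams> uiParams = JSONUtils.toList(pluginUiParamsJson, PluginParams.class);
        List<PluginParams> merged = new ArrayList<>(uiParams.size());
        for (PluginParams param : uiParams) {
            param.setValue(storedValues.get(param.getName()));   // null when no value was saved for this param
            merged.add(param);
        }
        return JSONUtils.toJsonString(merged);
    }
}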

136
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java

@ -0,0 +1,136 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Map;
/**
* base service impl
*/
public class BaseServiceImpl implements BaseService {
/**
* check admin
*
* @param user input user
* @return true if administrator, otherwise return false
*/
public boolean isAdmin(User user) {
return user.getUserType() == UserType.ADMIN_USER;
}
/**
* isNotAdmin
*
* @param loginUser login user
* @param result result code
* @return true if not administrator, otherwise false
*/
public boolean isNotAdmin(User loginUser, Map<String, Object> result) {
//only admin can operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return true;
}
return false;
}
/**
* put message to map
*
* @param result result code
* @param status status
* @param statusParams status message
*/
public void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
/**
* put message to result object
*
* @param result result code
* @param status status
* @param statusParams status message
*/
public void putMsg(Result result, Status status, Object... statusParams) {
result.setCode(status.getCode());
if (statusParams != null && statusParams.length > 0) {
result.setMsg(MessageFormat.format(status.getMsg(), statusParams));
} else {
result.setMsg(status.getMsg());
}
}
/**
* check
*
* @param result result
* @param bool bool
* @param userNoOperationPerm status
* @return check result
*/
public boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm) {
// only admin can operate
if (bool) {
result.put(Constants.STATUS, userNoOperationPerm);
result.put(Constants.MSG, userNoOperationPerm.getMsg());
return true;
}
return false;
}
/**
* create tenant dir if not exists
*
* @param tenantCode tenant code
* @throws IOException if hdfs operation exception
*/
public void createTenantDirIfNotExists(String tenantCode) throws IOException {
String resourcePath = HadoopUtils.getHdfsResDir(tenantCode);
String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
// init resource path and udf path
HadoopUtils.getInstance().mkdir(resourcePath);
HadoopUtils.getInstance().mkdir(udfsPath);
}
/**
* has perm
*
* @param operateUser operate user
* @param createUserId create user id
*/
public boolean hasPerm(User operateUser, int createUserId) {
return operateUser.getId() == createUserId || isAdmin(operateUser);
}
}
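BaseServiceImpl is the new home for the helpers every *ServiceImpl in this merge relies on. A minimal sketch of the intended usage pattern (the example service and its method are hypothetical):

package org.apache.dolphinscheduler.api.service.impl;

import java.util.HashMap;
import java.util.Map;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.User;
import org.springframework.stereotype.Service;

@Service
public class ExampleServiceImpl extends BaseServiceImpl {

    /** Admin-only query following the putMsg / Constants.DATA_LIST convention used above. */
    public Map<String, Object> queryExample(User loginUser) {
        Map<String, Object> result = new HashMap<>();
        if (isNotAdmin(loginUser, result)) {
            return result;          // result already carries USER_NO_OPERATION_PERM
        }
        result.put(Constants.DATA_LIST, "example payload");
        putMsg(result, Status.SUCCESS);
        return result;
    }
}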

10
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java

@ -14,13 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.dto.CommandStateCount;
import org.apache.dolphinscheduler.api.dto.DefineUserDto;
import org.apache.dolphinscheduler.api.dto.TaskCountDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.DataAnalysisService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.common.Constants;
@ -60,7 +60,7 @@ import org.springframework.stereotype.Service;
* data analysis service impl
*/
@Service
public class DataAnalysisServiceImpl extends BaseService implements DataAnalysisService { public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnalysisService {
@Autowired
private ProjectMapper projectMapper;
@ -130,7 +130,7 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis
private Map<String, Object> countStateByProject(User loginUser, int projectId, String startDate, String endDate
, TriFunction<Date, Date, Integer[], List<ExecuteStatusCount>> instanceStateCounter) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
@ -193,7 +193,7 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis
*/
public Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
@ -264,7 +264,7 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis
* @return queue state count data
*/
public Map<String, Object> countQueueState(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
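countStateByProject takes the state-counting query as a TriFunction so the process-instance and task-instance variants can share one body; the interface itself is defined elsewhere in the codebase and is not part of this hunk. A minimal equivalent for reference:

@FunctionalInterface
interface TriFunctionSketch<A, B, C, R> {

    // in countStateByProject the three arguments are (Date start, Date end, Integer[] projectIds)
    // and the result is a List<ExecuteStatusCount>, supplied as a lambda or method reference
    R apply(A first, B second, C third);
}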

659
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java

@ -0,0 +1,659 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.DataSourceService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.datasource.OracleDataSource;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* data source service impl
*/
@Service
public class DataSourceServiceImpl extends BaseServiceImpl implements DataSourceService {
private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceImpl.class);
public static final String NAME = "name";
public static final String NOTE = "note";
public static final String TYPE = "type";
public static final String HOST = "host";
public static final String PORT = "port";
public static final String PRINCIPAL = "principal";
public static final String DATABASE = "database";
public static final String USER_NAME = "userName";
public static final String OTHER = "other";
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private DataSourceUserMapper datasourceUserMapper;
/**
* create data source
*
* @param loginUser login user
* @param name data source name
* @param desc data source description
* @param type data source type
* @param parameter datasource parameters
* @return create result code
*/
public Result<Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter) {
Result<Object> result = new Result<>();
// check name can use or not
if (checkName(name)) {
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
Result<Object> isConnection = checkConnection(type, parameter);
if (Status.SUCCESS.getCode() != isConnection.getCode()) {
return result;
}
// build datasource
DataSource dataSource = new DataSource();
Date now = new Date();
dataSource.setName(name.trim());
dataSource.setNote(desc);
dataSource.setUserId(loginUser.getId());
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(type);
dataSource.setConnectionParams(parameter);
dataSource.setCreateTime(now);
dataSource.setUpdateTime(now);
dataSourceMapper.insert(dataSource);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update datasource
*
* @param loginUser login user
* @param name data source name
* @param desc data source description
* @param type data source type
* @param parameter datasource parameters
* @param id data source id
* @return update result code
*/
public Result<Object> updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) {
Result<Object> result = new Result<>();
// determine whether the data source exists
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, dataSource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
//check name can use or not
if (!name.trim().equals(dataSource.getName()) && checkName(name)) {
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
// check password: if the password is not updated, keep the old password.
ObjectNode paramObject = JSONUtils.parseObject(parameter);
String password = paramObject.path(Constants.PASSWORD).asText();
if (StringUtils.isBlank(password)) {
String oldConnectionParams = dataSource.getConnectionParams();
ObjectNode oldParams = JSONUtils.parseObject(oldConnectionParams);
paramObject.put(Constants.PASSWORD, oldParams.path(Constants.PASSWORD).asText());
}
// connectionParams json
String connectionParams = paramObject.toString();
Result<Object> isConnection = checkConnection(type, parameter);
if (Status.SUCCESS.getCode() != isConnection.getCode()) {
return result;
}
Date now = new Date();
dataSource.setName(name.trim());
dataSource.setNote(desc);
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(type);
dataSource.setConnectionParams(connectionParams);
dataSource.setUpdateTime(now);
dataSourceMapper.updateById(dataSource);
putMsg(result, Status.SUCCESS);
return result;
}
private boolean checkName(String name) {
List<DataSource> queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim());
return queryDataSource != null && !queryDataSource.isEmpty();
}
/**
* query datasource detail by id
*
* @param id datasource id
* @return data source detail
*/
public Map<String, Object> queryDataSource(int id) {
Map<String, Object> result = new HashMap<>();
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
// type
String dataSourceType = dataSource.getType().toString();
// name
String dataSourceName = dataSource.getName();
// desc
String desc = dataSource.getNote();
// parameter
String parameter = dataSource.getConnectionParams();
BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter);
DbConnectType connectType = null;
String hostSeperator = Constants.DOUBLE_SLASH;
if (DbType.ORACLE.equals(dataSource.getType())) {
connectType = ((OracleDataSource) datasourceForm).getConnectType();
if (DbConnectType.ORACLE_SID.equals(connectType)) {
hostSeperator = Constants.AT_SIGN;
}
}
String database = datasourceForm.getDatabase();
// jdbc connection params
String other = datasourceForm.getOther();
String address = datasourceForm.getAddress();
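// the address part of the connection params is expected to look like "jdbc:mysql://host:3306"
// (split on "//") or, for an Oracle SID url, "jdbc:oracle:thin:@host:1521" (split on the "@"
// hostSeperator chosen above); exact formats are an assumption for illustration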
String[] hostsPorts = getHostsAndPort(address, hostSeperator);
// ip host
String host = hostsPorts[0];
// port
String port = hostsPorts[1];
String separator = "";
switch (dataSource.getType()) {
case HIVE:
case SQLSERVER:
separator = ";";
break;
case MYSQL:
case POSTGRESQL:
case CLICKHOUSE:
case ORACLE:
case PRESTO:
separator = "&";
break;
default:
separator = "&";
break;
}
Map<String, String> otherMap = new LinkedHashMap<>();
if (other != null) {
String[] configs = other.split(separator);
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
}
Map<String, Object> map = new HashMap<>();
map.put(NAME, dataSourceName);
map.put(NOTE, desc);
map.put(TYPE, dataSourceType);
if (connectType != null) {
map.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
}
map.put(HOST, host);
map.put(PORT, port);
map.put(PRINCIPAL, datasourceForm.getPrincipal());
map.put(Constants.KERBEROS_KRB5_CONF_PATH, datasourceForm.getJavaSecurityKrb5Conf());
map.put(Constants.KERBEROS_KEY_TAB_USERNAME, datasourceForm.getLoginUserKeytabUsername());
map.put(Constants.KERBEROS_KEY_TAB_PATH, datasourceForm.getLoginUserKeytabPath());
map.put(DATABASE, database);
map.put(USER_NAME, datasourceForm.getUser());
map.put(OTHER, otherMap);
result.put(Constants.DATA_LIST, map);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query datasource list by keyword
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return data source list page
*/
public Map<String, Object> queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
IPage<DataSource> dataSourceList;
Page<DataSource> dataSourcePage = new Page<>(pageNo, pageSize);
if (isAdmin(loginUser)) {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
} else {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal);
}
List<DataSource> dataSources = dataSourceList != null ? dataSourceList.getRecords() : new ArrayList<>();
handlePasswd(dataSources);
PageInfo<DataSource> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int) (dataSourceList != null ? dataSourceList.getTotal() : 0L));
pageInfo.setLists(dataSources);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* handle datasource connection password for safety
*
* @param dataSourceList
*/
private void handlePasswd(List<DataSource> dataSourceList) {
for (DataSource dataSource : dataSourceList) {
String connectionParams = dataSource.getConnectionParams();
ObjectNode object = JSONUtils.parseObject(connectionParams);
object.put(Constants.PASSWORD, getHiddenPassword());
dataSource.setConnectionParams(object.toString());
}
}
/**
* get hidden password (resolve the security hotspot)
*
* @return hidden password
*/
private String getHiddenPassword() {
return Constants.XXXXXX;
}
/**
* query data resource list
*
* @param loginUser login user
* @param type data source type
* @return data source list page
*/
public Map<String, Object> queryDataSourceList(User loginUser, Integer type) {
Map<String, Object> result = new HashMap<>();
List<DataSource> datasourceList;
if (isAdmin(loginUser)) {
datasourceList = dataSourceMapper.listAllDataSourceByType(type);
} else {
datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type);
}
result.put(Constants.DATA_LIST, datasourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify datasource exists
*
* @param name datasource name
* @return true if the datasource name does not exist, otherwise false
*/
public Result<Object> verifyDataSourceName(String name) {
Result<Object> result = new Result<>();
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name);
if (dataSourceList != null && !dataSourceList.isEmpty()) {
putMsg(result, Status.DATASOURCE_EXIST);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* check connection
*
* @param type data source type
* @param parameter data source parameters
* @return true if connect successfully, otherwise false
*/
public Result<Object> checkConnection(DbType type, String parameter) {
Result<Object> result = new Result<>();
BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter);
if (datasource == null) {
putMsg(result, Status.DATASOURCE_TYPE_NOT_EXIST, type);
return result;
}
try (Connection connection = datasource.getConnection()) {
if (connection == null) {
putMsg(result, Status.CONNECTION_TEST_FAILURE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
} catch (Exception e) {
logger.error("datasource test connection error, dbType:{}, jdbcUrl:{}, message:{}.", type, datasource.getJdbcUrl(), e.getMessage());
return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(),e.getMessage());
}
}
/**
* test connection
*
* @param id datasource id
* @return connect result code
*/
public Result<Object> connectionTest(int id) {
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
Result<Object> result = new Result<>();
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
return checkConnection(dataSource.getType(), dataSource.getConnectionParams());
}
/**
* build parameters
*
* @param type data source type
* @param host data source host
* @param port data source port
* @param database data source database name
* @param userName user name
* @param password password
* @param other other parameters
* @param principal principal
* @param connectType oracle connect type
* @param javaSecurityKrb5Conf java.security.krb5.conf path
* @param loginUserKeytabUsername kerberos login user keytab username
* @param loginUserKeytabPath kerberos login user keytab path
* @return datasource parameter
*/
public String buildParameter(DbType type, String host,
String port, String database, String principal, String userName,
String password, DbConnectType connectType, String other,
String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) {
String address = buildAddress(type, host, port, connectType);
Map<String, Object> parameterMap = new LinkedHashMap<>();
String jdbcUrl;
if (DbType.SQLSERVER == type) {
jdbcUrl = address + ";databaseName=" + database;
} else {
jdbcUrl = address + "/" + database;
}
if (Constants.ORACLE.equals(type.name())) {
parameterMap.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
}
if (CommonUtils.getKerberosStartupState()
&& (type == DbType.HIVE || type == DbType.SPARK)) {
jdbcUrl += ";principal=" + principal;
}
String separator = "";
if (Constants.MYSQL.equals(type.name())
|| Constants.POSTGRESQL.equals(type.name())
|| Constants.CLICKHOUSE.equals(type.name())
|| Constants.ORACLE.equals(type.name())
|| Constants.PRESTO.equals(type.name())) {
separator = "&";
} else if (Constants.HIVE.equals(type.name())
|| Constants.SPARK.equals(type.name())
|| Constants.DB2.equals(type.name())
|| Constants.SQLSERVER.equals(type.name())) {
separator = ";";
}
parameterMap.put(TYPE, connectType);
parameterMap.put(Constants.ADDRESS, address);
parameterMap.put(Constants.DATABASE, database);
parameterMap.put(Constants.JDBC_URL, jdbcUrl);
parameterMap.put(Constants.USER, userName);
parameterMap.put(Constants.PASSWORD, CommonUtils.encodePassword(password));
if (CommonUtils.getKerberosStartupState()
&& (type == DbType.HIVE || type == DbType.SPARK)) {
parameterMap.put(Constants.PRINCIPAL, principal);
parameterMap.put(Constants.KERBEROS_KRB5_CONF_PATH, javaSecurityKrb5Conf);
parameterMap.put(Constants.KERBEROS_KEY_TAB_USERNAME, loginUserKeytabUsername);
parameterMap.put(Constants.KERBEROS_KEY_TAB_PATH, loginUserKeytabPath);
}
Map<String, String> map = JSONUtils.toMap(other);
if (map != null) {
StringBuilder otherSb = new StringBuilder();
for (Map.Entry<String, String> entry: map.entrySet()) {
otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator));
}
if (!Constants.DB2.equals(type.name())) {
otherSb.deleteCharAt(otherSb.length() - 1);
}
parameterMap.put(Constants.OTHER, otherSb);
}
if (logger.isDebugEnabled()) {
logger.info("parameters map:{}", JSONUtils.toJsonString(parameterMap));
}
return JSONUtils.toJsonString(parameterMap);
}
private String buildAddress(DbType type, String host, String port, DbConnectType connectType) {
StringBuilder sb = new StringBuilder();
if (Constants.MYSQL.equals(type.name())) {
sb.append(Constants.JDBC_MYSQL);
sb.append(host).append(":").append(port);
} else if (Constants.POSTGRESQL.equals(type.name())) {
sb.append(Constants.JDBC_POSTGRESQL);
sb.append(host).append(":").append(port);
} else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) {
sb.append(Constants.JDBC_HIVE_2);
String[] hostArray = host.split(",");
if (hostArray.length > 0) {
for (String zkHost : hostArray) {
sb.append(String.format("%s:%s,", zkHost, port));
}
sb.deleteCharAt(sb.length() - 1);
}
} else if (Constants.CLICKHOUSE.equals(type.name())) {
sb.append(Constants.JDBC_CLICKHOUSE);
sb.append(host).append(":").append(port);
} else if (Constants.ORACLE.equals(type.name())) {
if (connectType == DbConnectType.ORACLE_SID) {
sb.append(Constants.JDBC_ORACLE_SID);
} else {
sb.append(Constants.JDBC_ORACLE_SERVICE_NAME);
}
sb.append(host).append(":").append(port);
} else if (Constants.SQLSERVER.equals(type.name())) {
sb.append(Constants.JDBC_SQLSERVER);
sb.append(host).append(":").append(port);
} else if (Constants.DB2.equals(type.name())) {
sb.append(Constants.JDBC_DB2);
sb.append(host).append(":").append(port);
} else if (Constants.PRESTO.equals(type.name())) {
sb.append(Constants.JDBC_PRESTO);
sb.append(host).append(":").append(port);
}
return sb.toString();
}
/**
* delete datasource
*
* @param loginUser login user
* @param datasourceId data source id
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
public Result<Object> delete(User loginUser, int datasourceId) {
Result<Object> result = new Result<>();
try {
//query datasource by id
DataSource dataSource = dataSourceMapper.selectById(datasourceId);
if (dataSource == null) {
logger.error("resource id {} not exist", datasourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, dataSource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
dataSourceMapper.deleteById(datasourceId);
datasourceUserMapper.deleteByDatasourceId(datasourceId);
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
logger.error("delete datasource error", e);
throw new RuntimeException("delete datasource error");
}
return result;
}
/**
* unauthorized datasource
*
* @param loginUser login user
* @param userId user id
* @return unauthed data source result code
*/
public Map<String, Object> unauthDatasource(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
//only admin operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
/**
* query all data sources except userId
*/
List<DataSource> resultList = new ArrayList<>();
List<DataSource> datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId);
Set<DataSource> datasourceSet = null;
if (datasourceList != null && !datasourceList.isEmpty()) {
datasourceSet = new HashSet<>(datasourceList);
List<DataSource> authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId);
Set<DataSource> authedDataSourceSet = null;
if (authedDataSourceList != null && !authedDataSourceList.isEmpty()) {
authedDataSourceSet = new HashSet<>(authedDataSourceList);
datasourceSet.removeAll(authedDataSourceSet);
}
resultList = new ArrayList<>(datasourceSet);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* authorized datasource
*
* @param loginUser login user
* @param userId user id
* @return authorized result code
*/
public Map<String, Object> authedDatasource(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<DataSource> authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId);
result.put(Constants.DATA_LIST, authedDatasourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get host and port by address
*
* @param address address
* @param separator separator
* @return string array: [host, port]
*/
private String[] getHostsAndPort(String address, String separator) {
String[] result = new String[2];
String[] tmpArray = address.split(separator);
String hostsAndPorts = tmpArray[tmpArray.length - 1];
StringBuilder hosts = new StringBuilder();
String[] hostPortArray = hostsAndPorts.split(Constants.COMMA);
String port = hostPortArray[0].split(Constants.COLON)[1];
for (String hostPort : hostPortArray) {
hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA);
}
hosts.deleteCharAt(hosts.length() - 1);
result[0] = hosts.toString();
result[1] = port;
return result;
}
}
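As a side note for readers following the datasource form handling above, the sketch below is a minimal standalone illustration (not part of this change) of the host/port splitting that getHostsAndPort performs; the sample address, class name and output comment are illustrative assumptions only.
import java.util.Arrays;
// Hypothetical demo of the host/port splitting done by getHostsAndPort above.
public class HostsAndPortDemo {
    public static void main(String[] args) {
        String address = "jdbc:hive2://host1:10000,host2:10000"; // assumed multi-host Hive address format
        String[] tmp = address.split("//");                      // DOUBLE_SLASH separator
        String hostsAndPorts = tmp[tmp.length - 1];               // "host1:10000,host2:10000"
        String[] hostPortArray = hostsAndPorts.split(",");
        String port = hostPortArray[0].split(":")[1];             // port is taken from the first pair
        StringBuilder hosts = new StringBuilder();
        for (String hostPort : hostPortArray) {
            hosts.append(hostPort.split(":")[0]).append(',');
        }
        hosts.deleteCharAt(hosts.length() - 1);
        System.out.println(Arrays.asList(hosts.toString(), port)); // prints [host1,host2, 10000]
    }
}
Running it prints the comma-joined host list and the single port, mirroring the [host, port] array the service method returns.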

578
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java

@@ -0,0 +1,578 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT;
import org.apache.dolphinscheduler.api.enums.ExecuteType;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.api.service.MonitorService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.RunMode;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* executor service impl
*/
@Service
public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorService {
private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceImpl.class);
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private MonitorService monitorService;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private ProcessService processService;
/**
* execute process instance
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process Definition Id
* @param cronTime cron time
* @param commandType command type
* @param failureStrategy failure strategy
* @param startNodeList start node list
* @param taskDependType node dependency type
* @param warningType warning type
* @param warningGroupId notify group id
* @param processInstancePriority process instance priority
* @param workerGroup worker group name
* @param runMode run mode
* @param timeout timeout
* @param startParams the global param values which are passed to the new process instance
* @return execute process instance code
*/
public Map<String, Object> execProcessInstance(User loginUser, String projectName,
int processDefinitionId, String cronTime, CommandType commandType,
FailureStrategy failureStrategy, String startNodeList,
TaskDependType taskDependType, WarningType warningType, int warningGroupId,
RunMode runMode,
Priority processInstancePriority, String workerGroup, Integer timeout,
Map<String, String> startParams) {
Map<String, Object> result = new HashMap<>();
// timeout is invalid
if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) {
putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR);
return result;
}
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project);
if (checkResultAndAuth != null) {
return checkResultAndAuth;
}
// check process define release state
ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId);
result = checkProcessDefinitionValid(processDefinition, processDefinitionId);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (!checkTenantSuitable(processDefinition)) {
logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ",
processDefinition.getId(), processDefinition.getName());
putMsg(result, Status.TENANT_NOT_SUITABLE);
return result;
}
// check master exists
if (!checkMasterExists(result)) {
return result;
}
/**
* create command
*/
int create = this.createCommand(commandType, processDefinitionId,
taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(),
warningGroupId, runMode, processInstancePriority, workerGroup, startParams);
if (create > 0) {
processDefinition.setWarningGroupId(warningGroupId);
processDefinitionMapper.updateById(processDefinition);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.START_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
* check whether master exists
*
* @param result result
* @return true if a master server exists, otherwise false
*/
private boolean checkMasterExists(Map<String, Object> result) {
// check master server exists
List<Server> masterServers = monitorService.getServerListFromZK(true);
// no master
if (masterServers.isEmpty()) {
putMsg(result, Status.MASTER_NOT_EXISTS);
return false;
}
return true;
}
/**
* check whether the process definition can be executed
*
* @param processDefinition process definition
* @param processDefineId process definition id
* @return check result code
*/
public Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId) {
Map<String, Object> result = new HashMap<>();
if (processDefinition == null) {
// check process definition exists
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId);
} else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
// check process definition online
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefineId);
} else {
result.put(Constants.STATUS, Status.SUCCESS);
}
return result;
}
/**
* do action to process instance: pause, stop, repeat run, recover from pause, recover from stop
*
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @param executeType execute type
* @return execute result code
*/
public Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = checkResultAndAuth(loginUser, projectName, project);
if (checkResult != null) {
return checkResult;
}
// check master exists
if (!checkMasterExists(result)) {
return result;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (processInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) {
result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId());
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
}
checkResult = checkExecuteType(processInstance, executeType);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
if (!checkTenantSuitable(processDefinition)) {
logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ",
processDefinition.getId(), processDefinition.getName());
putMsg(result, Status.TENANT_NOT_SUITABLE);
}
switch (executeType) {
case REPEAT_RUNNING:
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING);
break;
case RECOVER_SUSPENDED_PROCESS:
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS);
break;
case START_FAILURE_TASK_PROCESS:
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS);
break;
case STOP:
if (processInstance.getState() == ExecutionStatus.READY_STOP) {
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState());
} else {
result = updateProcessInstancePrepare(processInstance, CommandType.STOP, ExecutionStatus.READY_STOP);
}
break;
case PAUSE:
if (processInstance.getState() == ExecutionStatus.READY_PAUSE) {
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState());
} else {
result = updateProcessInstancePrepare(processInstance, CommandType.PAUSE, ExecutionStatus.READY_PAUSE);
}
break;
default:
logger.error("unknown execute type : {}", executeType);
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type");
break;
}
return result;
}
/**
* check tenant suitable
*
* @param processDefinition process definition
* @return true if tenant suitable, otherwise return false
*/
private boolean checkTenantSuitable(ProcessDefinition processDefinition) {
Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(),
processDefinition.getUserId());
return tenant != null;
}
/**
* Check the state of process instance and the type of operation match
*
* @param processInstance process instance
* @param executeType execute type
* @return check result code
*/
private Map<String, Object> checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) {
Map<String, Object> result = new HashMap<>();
ExecutionStatus executionStatus = processInstance.getState();
boolean checkResult = false;
switch (executeType) {
case PAUSE:
case STOP:
if (executionStatus.typeIsRunning()) {
checkResult = true;
}
break;
case REPEAT_RUNNING:
if (executionStatus.typeIsFinished()) {
checkResult = true;
}
break;
case START_FAILURE_TASK_PROCESS:
if (executionStatus.typeIsFailure()) {
checkResult = true;
}
break;
case RECOVER_SUSPENDED_PROCESS:
if (executionStatus.typeIsPause() || executionStatus.typeIsCancel()) {
checkResult = true;
}
break;
default:
break;
}
if (!checkResult) {
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString());
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* prepare to update process instance command type and status
*
* @param processInstance process instance
* @param commandType command type
* @param executionStatus execute status
* @return update result
*/
private Map<String, Object> updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) {
Map<String, Object> result = new HashMap<>();
processInstance.setCommandType(commandType);
processInstance.addHistoryCmd(commandType);
processInstance.setState(executionStatus);
int update = processService.updateProcessInstance(processInstance);
// determine whether the process is normal
if (update > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
* insert command, used by the page operations: re-run and recover (from pause / failure) execution
*
* @param loginUser login user
* @param instanceId instance id
* @param processDefinitionId process definition id
* @param commandType command type
* @return insert result code
*/
private Map<String, Object> insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) {
Map<String, Object> result = new HashMap<>();
Command command = new Command();
command.setCommandType(commandType);
command.setProcessDefinitionId(processDefinitionId);
command.setCommandParam(String.format("{\"%s\":%d}",
CMD_PARAM_RECOVER_PROCESS_ID_STRING, instanceId));
command.setExecutorId(loginUser.getId());
if (!processService.verifyIsNeedCreateCommand(command)) {
putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, processDefinitionId);
return result;
}
int create = processService.createCommand(command);
if (create > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
* check if sub processes are offline before starting process definition
*
* @param processDefineId process definition id
* @return check result code
*/
public Map<String, Object> startCheckByProcessDefinedId(int processDefineId) {
Map<String, Object> result = new HashMap<>();
if (processDefineId == 0) {
logger.error("process definition id is null");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "process definition id");
}
List<Integer> ids = new ArrayList<>();
processService.recurseFindSubProcessId(processDefineId, ids);
Integer[] idArray = ids.toArray(new Integer[ids.size()]);
if (!ids.isEmpty()) {
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray);
if (processDefinitionList != null) {
for (ProcessDefinition processDefinition : processDefinitionList) {
/**
* if there is no online process, exit directly
*/
if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName());
logger.info("not release process definition id: {} , name : {}",
processDefinition.getId(), processDefinition.getName());
return result;
}
}
}
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* create command
*
* @param commandType commandType
* @param processDefineId processDefineId
* @param nodeDep nodeDep
* @param failureStrategy failureStrategy
* @param startNodeList startNodeList
* @param schedule schedule
* @param warningType warningType
* @param executorId executorId
* @param warningGroupId warningGroupId
* @param runMode runMode
* @param processInstancePriority processInstancePriority
* @param workerGroup workerGroup
* @return command id
*/
private int createCommand(CommandType commandType, int processDefineId,
TaskDependType nodeDep, FailureStrategy failureStrategy,
String startNodeList, String schedule, WarningType warningType,
int executorId, int warningGroupId,
RunMode runMode, Priority processInstancePriority, String workerGroup,
Map<String, String> startParams) {
/**
* instantiate command schedule instance
*/
Command command = new Command();
Map<String, String> cmdParam = new HashMap<>();
if (commandType == null) {
command.setCommandType(CommandType.START_PROCESS);
} else {
command.setCommandType(commandType);
}
command.setProcessDefinitionId(processDefineId);
if (nodeDep != null) {
command.setTaskDependType(nodeDep);
}
if (failureStrategy != null) {
command.setFailureStrategy(failureStrategy);
}
if (StringUtils.isNotEmpty(startNodeList)) {
cmdParam.put(CMD_PARAM_START_NODE_NAMES, startNodeList);
}
if (warningType != null) {
command.setWarningType(warningType);
}
if (startParams != null && startParams.size() > 0) {
cmdParam.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParams));
}
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
command.setExecutorId(executorId);
command.setWarningGroupId(warningGroupId);
command.setProcessInstancePriority(processInstancePriority);
command.setWorkerGroup(workerGroup);
Date start = null;
Date end = null;
if (StringUtils.isNotEmpty(schedule)) {
String[] interval = schedule.split(",");
if (interval.length == 2) {
start = DateUtils.getScheduleDate(interval[0]);
end = DateUtils.getScheduleDate(interval[1]);
}
}
// determine whether to complement
if (commandType == CommandType.COMPLEMENT_DATA) {
runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode;
if (null != start && null != end && !start.after(end)) {
if (runMode == RunMode.RUN_MODE_SERIAL) {
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
return processService.createCommand(command);
} else if (runMode == RunMode.RUN_MODE_PARALLEL) {
List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId);
List<Date> listDate = new LinkedList<>();
if (!CollectionUtils.isEmpty(schedules)) {
for (Schedule item : schedules) {
listDate.addAll(CronUtils.getSelfFireDateList(start, end, item.getCrontab()));
}
}
if (!CollectionUtils.isEmpty(listDate)) {
// loop by schedule date
for (Date date : listDate) {
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(date));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
processService.createCommand(command);
}
return listDate.size();
} else {
// loop by day
int runCunt = 0;
while (!start.after(end)) {
runCunt += 1;
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
processService.createCommand(command);
start = DateUtils.getSomeDay(start, 1);
}
return runCunt;
}
}
} else {
logger.error("there is not valid schedule date for the process definition: id:{}", processDefineId);
}
} else {
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
return processService.createCommand(command);
}
return 0;
}
/**
* check result and auth
*/
private Map<String, Object> checkResultAndAuth(User loginUser, String projectName, Project project) {
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
return null;
}
}
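To make the complement-data "loop by day" branch of createCommand above easier to follow, here is a rough standalone sketch (hypothetical, with hard-coded dates standing in for the parsed schedule interval) of walking one day at a time from start to end, which is what yields one command per day in serial run mode.
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
// Hypothetical sketch of the per-day complement loop; dates are hard-coded examples.
public class ComplementLoopDemo {
    public static void main(String[] args) throws ParseException {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
        Date start = fmt.parse("2021-01-01");
        Date end = fmt.parse("2021-01-03");
        int runCount = 0;
        while (!start.after(end)) {
            runCount++;
            // in ExecutorServiceImpl this is where one complement command would be created per day
            System.out.println("complement date: " + fmt.format(start));
            Calendar c = Calendar.getInstance();
            c.setTime(start);
            c.add(Calendar.DAY_OF_MONTH, 1);   // stands in for DateUtils.getSomeDay(start, 1)
            start = c.getTime();
        }
        System.out.println("commands created: " + runCount); // 3
    }
}
The loop terminates because start is advanced by one day each iteration until it passes end, matching the return value of the serial branch (the number of commands created).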

3
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java

@@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
@@ -41,7 +42,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
- * log service
+ * logger service impl
*/
@Service
public class LoggerServiceImpl implements LoggerService {

160
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java

@@ -0,0 +1,160 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.utils.Preconditions.checkNotNull;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.MonitorService;
import org.apache.dolphinscheduler.api.utils.ZookeeperMonitor;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ZKNodeType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.model.WorkerServerModel;
import org.apache.dolphinscheduler.dao.MonitorDBDao;
import org.apache.dolphinscheduler.dao.entity.MonitorRecord;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.google.common.collect.Sets;
/**
* monitor service impl
*/
@Service
public class MonitorServiceImpl extends BaseServiceImpl implements MonitorService {
@Autowired
private ZookeeperMonitor zookeeperMonitor;
@Autowired
private MonitorDBDao monitorDBDao;
/**
* query database state
*
* @param loginUser login user
* @return data base state
*/
public Map<String,Object> queryDatabaseState(User loginUser) {
Map<String, Object> result = new HashMap<>();
List<MonitorRecord> monitorRecordList = monitorDBDao.queryDatabaseState();
result.put(Constants.DATA_LIST, monitorRecordList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query master list
*
* @param loginUser login user
* @return master information list
*/
public Map<String,Object> queryMaster(User loginUser) {
Map<String, Object> result = new HashMap<>();
List<Server> masterServers = getServerListFromZK(true);
result.put(Constants.DATA_LIST, masterServers);
putMsg(result,Status.SUCCESS);
return result;
}
/**
* query zookeeper state
*
* @param loginUser login user
* @return zookeeper information list
*/
public Map<String,Object> queryZookeeperState(User loginUser) {
Map<String, Object> result = new HashMap<>();
List<ZookeeperRecord> zookeeperRecordList = zookeeperMonitor.zookeeperInfoList();
result.put(Constants.DATA_LIST, zookeeperRecordList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query worker list
*
* @param loginUser login user
* @return worker information list
*/
public Map<String,Object> queryWorker(User loginUser) {
Map<String, Object> result = new HashMap<>();
List<WorkerServerModel> workerServers = getServerListFromZK(false)
.stream()
.map((Server server) -> {
WorkerServerModel model = new WorkerServerModel();
model.setId(server.getId());
model.setHost(server.getHost());
model.setPort(server.getPort());
model.setZkDirectories(Sets.newHashSet(server.getZkDirectory()));
model.setResInfo(server.getResInfo());
model.setCreateTime(server.getCreateTime());
model.setLastHeartbeatTime(server.getLastHeartbeatTime());
return model;
})
.collect(Collectors.toList());
Map<String, WorkerServerModel> workerHostPortServerMapping = workerServers
.stream()
.collect(Collectors.toMap(
(WorkerServerModel worker) -> {
String[] s = worker.getZkDirectories().iterator().next().split("/");
return s[s.length - 1];
}
, Function.identity()
, (WorkerServerModel oldOne, WorkerServerModel newOne) -> {
oldOne.getZkDirectories().addAll(newOne.getZkDirectories());
return oldOne;
}));
result.put(Constants.DATA_LIST, workerHostPortServerMapping.values());
putMsg(result,Status.SUCCESS);
return result;
}
public List<Server> getServerListFromZK(boolean isMaster) {
checkNotNull(zookeeperMonitor);
ZKNodeType zkNodeType = isMaster ? ZKNodeType.MASTER : ZKNodeType.WORKER;
return zookeeperMonitor.getServersList(zkNodeType);
}
}
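The worker de-duplication in queryWorker above hinges on Collectors.toMap with a merge function. The following self-contained sketch (plain strings stand in for WorkerServerModel and its zk directories; paths and class name are illustrative only) shows the same idea of grouping by the trailing host:port segment and accumulating the zk paths of duplicates.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
// Hypothetical sketch: merge entries that share the same host:port key, keeping all zk paths.
public class WorkerMergeDemo {
    public static void main(String[] args) {
        List<String> zkDirs = Arrays.asList(
                "/dolphinscheduler/nodes/worker/default/10.0.0.1:1234",
                "/dolphinscheduler/nodes/worker/gpu/10.0.0.1:1234",
                "/dolphinscheduler/nodes/worker/default/10.0.0.2:1234");
        Map<String, List<String>> byHostPort = zkDirs.stream().collect(Collectors.toMap(
                dir -> {
                    String[] s = dir.split("/");
                    return s[s.length - 1];                      // key: trailing host:port segment
                },
                dir -> new ArrayList<>(Arrays.asList(dir)),      // value: list of zk directories
                (oldOne, newOne) -> {                            // merge: accumulate directories
                    oldOne.addAll(newOne);
                    return oldOne;
                },
                LinkedHashMap::new));
        byHostPort.forEach((k, v) -> System.out.println(k + " -> " + v));
    }
}
With the sample input, the two directories registered by 10.0.0.1:1234 collapse into one entry, which is exactly what the merge lambda in queryWorker does for WorkerServerModel.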

23
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java

@@ -23,7 +23,6 @@ import org.apache.dolphinscheduler.api.dto.ProcessMeta;
import org.apache.dolphinscheduler.api.dto.treeview.Instance;
import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
import org.apache.dolphinscheduler.api.enums.Status;
-import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
@@ -111,8 +110,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
* process definition service impl
*/
@Service
-public class ProcessDefinitionServiceImpl extends BaseService implements
-        ProcessDefinitionService {
+public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements ProcessDefinitionService {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceImpl.class);
@@ -146,6 +144,9 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
@Autowired
private ProcessService processService;
+@Autowired
+private SchedulerService schedulerService;
/**
* create process definition
*
@@ -273,7 +274,7 @@
@Override
public Map<String, Object> queryProcessDefinitionList(User loginUser, String projectName) {
-HashMap<String, Object> result = new HashMap<>(5);
+HashMap<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@@ -399,7 +400,7 @@
String desc,
String locations,
String connects) {
-Map<String, Object> result = new HashMap<>(5);
+Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@@ -514,7 +515,7 @@
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) {
-Map<String, Object> result = new HashMap<>(5);
+Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@@ -634,7 +635,7 @@
// set status
schedule.setReleaseState(ReleaseState.OFFLINE);
scheduleMapper.updateById(schedule);
-SchedulerService.deleteSchedule(project.getId(), schedule.getId());
+schedulerService.deleteSchedule(project.getId(), schedule.getId());
}
break;
default:
@@ -823,7 +824,7 @@
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) {
-Map<String, Object> result = new HashMap<>(5);
+Map<String, Object> result = new HashMap<>();
String processMetaJson = FileUtils.file2String(file);
List<ProcessMeta> processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class);
@@ -992,7 +993,7 @@
}
//recursive sub-process parameter correction map key for old process id value for new process id
-Map<Integer, Integer> subProcessIdMap = new HashMap<>(20);
+Map<Integer, Integer> subProcessIdMap = new HashMap<>();
List<Object> subProcessList = StreamUtils.asStream(jsonArray.elements())
.filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText()))
@@ -1283,7 +1284,7 @@
@Override
public Map<String, Object> queryProcessDefinitionAllByProjectId(Integer projectId) {
-HashMap<String, Object> result = new HashMap<>(5);
+HashMap<String, Object> result = new HashMap<>();
List<ProcessDefinition> resourceList = processDefineMapper.queryAllDefinitionList(projectId);
result.put(Constants.DATA_LIST, resourceList);
@@ -1494,7 +1495,7 @@
Integer processId,
Project targetProject) throws JsonProcessingException {
-Map<String, Object> result = new HashMap<>(5);
+Map<String, Object> result = new HashMap<>();
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (processDefinition == null) {
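One notable change in this hunk set is that the schedule delete now goes through the newly injected schedulerService bean instead of a static SchedulerService call. The sketch below (hypothetical classes, not the project's real ones) shows the pattern this moves to, namely delegating to a Spring-injected collaborator so the caller stays easy to mock in tests.
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
// Hypothetical sketch of preferring an injected collaborator over a static call.
@Service
public class ScheduleCleanupDemo {
    /** stand-in for the project's SchedulerService bean */
    public interface ScheduleDeleter {
        void deleteSchedule(int projectId, int scheduleId);
    }
    @Autowired
    private ScheduleDeleter scheduleDeleter;
    public void offlineAndDelete(int projectId, int scheduleId) {
        // before: a static call such as SchedulerService.deleteSchedule(projectId, scheduleId)
        // after:  delegate to the injected instance, as the diff above now does
        scheduleDeleter.deleteSchedule(projectId, scheduleId);
    }
}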

7
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java

@@ -18,7 +18,6 @@
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
-import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
@@ -42,9 +41,11 @@ import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.collect.ImmutableMap;
+/**
+ * process definition version service impl
+ */
@Service
-public class ProcessDefinitionVersionServiceImpl extends BaseService implements
-        ProcessDefinitionVersionService {
+public class ProcessDefinitionVersionServiceImpl extends BaseServiceImpl implements ProcessDefinitionVersionService {
@Autowired
private ProcessDefinitionVersionMapper processDefinitionVersionMapper;

740
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java

@@ -0,0 +1,740 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.DATA_LIST;
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT;
import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE;
import static org.apache.dolphinscheduler.common.Constants.TASK_LIST;
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
import org.apache.dolphinscheduler.api.dto.gantt.Task;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.api.service.LoggerService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* process instance service impl
*/
@Service
public class ProcessInstanceServiceImpl extends BaseServiceImpl implements ProcessInstanceService {
@Autowired
ProjectMapper projectMapper;
@Autowired
ProjectService projectService;
@Autowired
ProcessService processService;
@Autowired
ProcessInstanceMapper processInstanceMapper;
@Autowired
ProcessDefinitionMapper processDefineMapper;
@Autowired
ProcessDefinitionService processDefinitionService;
@Autowired
ProcessDefinitionVersionService processDefinitionVersionService;
@Autowired
ExecutorService execService;
@Autowired
TaskInstanceMapper taskInstanceMapper;
@Autowired
LoggerService loggerService;
@Autowired
UsersService usersService;
/**
* return the top n longest-running SUCCESS process instances that started between startTime and endTime
*/
public Map<String, Object> queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
if (0 > size) {
putMsg(result, Status.NEGTIVE_SIZE_NUMBER_ERROR, size);
return result;
}
if (Objects.isNull(startTime)) {
putMsg(result, Status.DATA_IS_NULL, Constants.START_TIME);
return result;
}
Date start = DateUtils.stringToDate(startTime);
if (Objects.isNull(endTime)) {
putMsg(result, Status.DATA_IS_NULL, Constants.END_TIME);
return result;
}
Date end = DateUtils.stringToDate(endTime);
if (start == null || end == null) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE);
return result;
}
if (start.getTime() > end.getTime()) {
putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR, startTime, endTime);
return result;
}
List<ProcessInstance> processInstances = processInstanceMapper.queryTopNProcessInstance(size, start, end, ExecutionStatus.SUCCESS);
result.put(DATA_LIST, processInstances);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process instance by id
*
* @param loginUser login user
* @param projectName project name
* @param processId process instance id
* @return process instance detail
*/
public Map<String, Object> queryProcessInstanceById(User loginUser, String projectName, Integer processId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
processInstance.setWarningGroupId(processDefinition.getWarningGroupId());
result.put(DATA_LIST, processInstance);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* paging query process instance list, filtering according to project, process definition, time range, keyword, process status
*
* @param loginUser login user
* @param projectName project name
* @param pageNo page number
* @param pageSize page size
* @param processDefineId process definition id
* @param searchVal search value
* @param stateType state type
* @param host host
* @param startDate start time
* @param endDate end time
* @return process instance list
*/
public Map<String, Object> queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId,
String startDate, String endDate,
String searchVal, String executorName, ExecutionStatus stateType, String host,
Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
int[] statusArray = null;
// filter by state
if (stateType != null) {
statusArray = new int[]{stateType.ordinal()};
}
Date start = null;
Date end = null;
try {
if (StringUtils.isNotEmpty(startDate)) {
start = DateUtils.getScheduleDate(startDate);
}
if (StringUtils.isNotEmpty(endDate)) {
end = DateUtils.getScheduleDate(endDate);
}
} catch (Exception e) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE);
return result;
}
Page<ProcessInstance> page = new Page<>(pageNo, pageSize);
PageInfo<ProcessInstance> pageInfo = new PageInfo<>(pageNo, pageSize);
int executorId = usersService.getUserIdByName(executorName);
IPage<ProcessInstance> processInstanceList =
processInstanceMapper.queryProcessInstanceListPaging(page,
project.getId(), processDefineId, searchVal, executorId, statusArray, host, start, end);
List<ProcessInstance> processInstances = processInstanceList.getRecords();
for (ProcessInstance processInstance : processInstances) {
processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime()));
User executor = usersService.queryUser(processInstance.getExecutorId());
if (null != executor) {
processInstance.setExecutorName(executor.getUserName());
}
}
pageInfo.setTotalCount((int) processInstanceList.getTotal());
pageInfo.setLists(processInstances);
result.put(DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query task list by process instance id
*
* @param loginUser login user
* @param projectName project name
* @param processId process instance id
* @return task list for the process instance
* @throws IOException io exception
*/
public Map<String, Object> queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processId);
addDependResultForTaskList(taskInstanceList);
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString());
resultMap.put(TASK_LIST, taskInstanceList);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* add dependent result for dependent task
*/
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException {
for (TaskInstance taskInstance : taskInstanceList) {
if (taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) {
Result<String> logResult = loggerService.queryLog(
taskInstance.getId(), Constants.LOG_QUERY_SKIP_LINE_NUMBER, Constants.LOG_QUERY_LIMIT);
if (logResult.getCode() == Status.SUCCESS.ordinal()) {
String log = logResult.getData();
Map<String, DependResult> resultMap = parseLogForDependentResult(log);
taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap));
}
}
}
}
public Map<String, DependResult> parseLogForDependentResult(String log) throws IOException {
Map<String, DependResult> resultMap = new HashMap<>();
if (StringUtils.isEmpty(log)) {
return resultMap;
}
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
String line;
while ((line = br.readLine()) != null) {
if (line.contains(DEPENDENT_SPLIT)) {
String[] tmpStringArray = line.split(":\\|\\|");
if (tmpStringArray.length != 2) {
continue;
}
String dependResultString = tmpStringArray[1];
String[] dependStringArray = dependResultString.split(",");
if (dependStringArray.length != 2) {
continue;
}
String key = dependStringArray[0].trim();
DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim());
resultMap.put(key, dependResult);
}
}
return resultMap;
}
/**
* query sub process instance detail info by task id
*
* @param loginUser login user
* @param projectName project name
* @param taskId task id
* @return sub process instance detail
*/
public Map<String, Object> querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
TaskInstance taskInstance = processService.findTaskInstanceById(taskId);
if (taskInstance == null) {
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId);
return result;
}
if (!taskInstance.isSubProcess()) {
putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName());
return result;
}
ProcessInstance subWorkflowInstance = processService.findSubProcessInstance(
taskInstance.getProcessInstanceId(), taskInstance.getId());
if (subWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId);
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put(Constants.SUBPROCESS_INSTANCE_ID, subWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update process instance
*
* @param loginUser login user
* @param projectName project name
* @param processInstanceJson process instance json
* @param processInstanceId process instance id
* @param scheduleTime schedule time
* @param syncDefine sync define
* @param flag flag
* @param locations locations
* @param connects connects
* @return update result code
* @throws ParseException parse exception for json parse
*/
public Map<String, Object> updateProcessInstance(User loginUser, String projectName, Integer processInstanceId,
String processInstanceJson, String scheduleTime, Boolean syncDefine,
Flag flag, String locations, String connects) throws ParseException {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
//check project permission
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
//check process instance exists
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (processInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
//check process instance status
if (!processInstance.getState().typeIsFinished()) {
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR,
processInstance.getName(), processInstance.getState().toString(), "update");
return result;
}
Date schedule = processInstance.getScheduleTime();
if (scheduleTime != null) {
schedule = DateUtils.getScheduleDate(scheduleTime);
}
processInstance.setScheduleTime(schedule);
processInstance.setLocations(locations);
processInstance.setConnects(connects);
String globalParams = null;
String originDefParams = null;
int timeout = processInstance.getTimeout();
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
if (StringUtils.isNotEmpty(processInstanceJson)) {
ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class);
//check workflow json is valid
Map<String, Object> checkFlowJson = processDefinitionService.checkProcessNodeList(processData, processInstanceJson);
if (checkFlowJson.get(Constants.STATUS) != Status.SUCCESS) {
return checkFlowJson;
}
originDefParams = JSONUtils.toJsonString(processData.getGlobalParams());
List<Property> globalParamList = processData.getGlobalParams();
Map<String, String> globalParamMap = Optional.ofNullable(globalParamList).orElse(Collections.emptyList()).stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList,
processInstance.getCmdTypeIfComplement(), schedule);
timeout = processData.getTimeout();
processInstance.setTimeout(timeout);
Tenant tenant = processService.getTenantForProcess(processData.getTenantId(),
processDefinition.getUserId());
if (tenant != null) {
processInstance.setTenantCode(tenant.getTenantCode());
}
// read the old process instance json before saving, so the task names and ids can be carried over into the new json
String oldJson = processInstance.getProcessInstanceJson();
if (StringUtils.isNotEmpty(oldJson)) {
processInstanceJson = processService.changeJson(processData,oldJson);
}
processInstance.setProcessInstanceJson(processInstanceJson);
processInstance.setGlobalParams(globalParams);
}
int update = processService.updateProcessInstance(processInstance);
int updateDefine = 1;
if (Boolean.TRUE.equals(syncDefine)) {
processDefinition.setProcessDefinitionJson(processInstanceJson);
processDefinition.setGlobalParams(originDefParams);
processDefinition.setLocations(locations);
processDefinition.setConnects(connects);
processDefinition.setTimeout(timeout);
processDefinition.setUpdateTime(new Date());
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
processDefinition.setVersion(version);
updateDefine = processDefineMapper.updateById(processDefinition);
}
if (update > 0 && updateDefine > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
* query parent process instance detail info by sub process instance id
*
* @param loginUser login user
* @param projectName project name
* @param subId sub process id
* @return parent instance detail
*/
public Map<String, Object> queryParentInstanceBySubId(User loginUser, String projectName, Integer subId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId);
if (subInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId);
return result;
}
if (subInstance.getIsSubProcess() == Flag.NO) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName());
return result;
}
ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId);
if (parentWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST);
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put(Constants.PARENT_WORKFLOW_INSTANCE, parentWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete process instance by id, and at the same time delete task instances and their mapping relation data
*
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (null == processInstance) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
processService.removeTaskLogFile(processInstanceId);
// delete database cascade
int delete = processService.deleteWorkProcessInstanceById(processInstanceId);
processService.deleteAllSubWorkProcessByParentId(processInstanceId);
processService.deleteWorkProcessMapByParentId(processInstanceId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR);
}
return result;
}
/**
* view process instance variables
*
* @param processInstanceId process instance id
* @return variables data
*/
public Map<String, Object> viewVariables(Integer processInstanceId) {
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
throw new RuntimeException("workflow instance is null");
}
Map<String, String> timeParams = BusinessTimeUtils
.getBusinessTime(processInstance.getCmdTypeIfComplement(),
processInstance.getScheduleTime());
String workflowInstanceJson = processInstance.getProcessInstanceJson();
ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class);
String userDefinedParams = processInstance.getGlobalParams();
// global params
List<Property> globalParams = new ArrayList<>();
if (userDefinedParams != null && userDefinedParams.length() > 0) {
globalParams = JSONUtils.toList(userDefinedParams, Property.class);
}
List<TaskNode> taskNodeList = workflowData.getTasks();
// global param string
String globalParamStr = JSONUtils.toJsonString(globalParams);
globalParamStr = ParameterUtils.convertParameterPlaceholders(globalParamStr, timeParams);
globalParams = JSONUtils.toList(globalParamStr, Property.class);
for (Property property : globalParams) {
timeParams.put(property.getProp(), property.getValue());
}
// local params
Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();
for (TaskNode taskNode : taskNodeList) {
String parameter = taskNode.getParams();
Map<String, String> map = JSONUtils.toMap(parameter);
String localParams = map.get(LOCAL_PARAMS);
if (localParams != null && !localParams.isEmpty()) {
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams);
List<Property> localParamsList = JSONUtils.toList(localParams, Property.class);
Map<String, Object> localParamsMap = new HashMap<>();
localParamsMap.put(Constants.TASK_TYPE, taskNode.getType());
localParamsMap.put(Constants.LOCAL_PARAMS_LIST, localParamsList);
if (CollectionUtils.isNotEmpty(localParamsList)) {
localUserDefParams.put(taskNode.getName(), localParamsMap);
}
}
}
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(GLOBAL_PARAMS, globalParams);
resultMap.put(LOCAL_PARAMS, localUserDefParams);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* encapsulation gantt structure
*
* @param processInstanceId process instance id
* @return gantt tree data
* @throws Exception exception when json parse
*/
public Map<String, Object> viewGantt(Integer processInstanceId) throws Exception {
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
throw new RuntimeException("workflow instance is null");
}
GanttDto ganttDto = new GanttDto();
DAG<String, TaskNode, TaskNodeRelation> dag = processInstance2DAG(processInstance);
//topological sort
List<String> nodeList = dag.topologicalSort();
ganttDto.setTaskNames(nodeList);
List<Task> taskList = new ArrayList<>();
for (String node : nodeList) {
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstanceId, node);
if (taskInstance == null) {
continue;
}
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
Task task = new Task();
task.setTaskName(taskInstance.getName());
task.getStartDate().add(startTime.getTime());
task.getEndDate().add(endTime.getTime());
task.setIsoStart(startTime);
task.setIsoEnd(endTime);
task.setStatus(taskInstance.getState().toString());
task.setExecutionDate(taskInstance.getStartTime());
task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime()));
taskList.add(task);
}
ganttDto.setTasks(taskList);
result.put(DATA_LIST, ganttDto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* process instance to DAG
*
* @param processInstance input process instance
* @return process instance dag.
*/
private static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) {
String processDefinitionJson = processInstance.getProcessInstanceJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
return DagHelper.buildDagGraph(processDag);
}
/**
* query process instance by processDefinitionId and stateArray
* @param processDefinitionId processDefinitionId
* @param states states array
* @return process instance list
*/
public List<ProcessInstance> queryByProcessDefineIdAndStatus(int processDefinitionId, int[] states) {
return processInstanceMapper.queryByProcessDefineIdAndStatus(processDefinitionId, states);
}
/**
* query process instance by processDefinitionId
* @param processDefinitionId processDefinitionId
* @param size size
* @return process instance list
*/
public List<ProcessInstance> queryByProcessDefineId(int processDefinitionId,int size) {
return processInstanceMapper.queryByProcessDefineId(processDefinitionId, size);
}
}
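For reference, a minimal illustration of the log shape that parseLogForDependentResult above relies on: only the ":||" separator and the "taskName,DependResult" pair after it matter to the parser; the surrounding log wording, the service reference and the sample task names are hypothetical, not part of the diff.
// Hypothetical dependent-task log excerpt and the map the parser would produce.
// "processInstanceService" stands for the service implemented above;
// SUCCESS/FAILED are assumed to be valid DependResult values.
String sampleLog =
"[INFO] dependent item result :|| dep_task_a,SUCCESS\n"
+ "[INFO] dependent item result :|| dep_task_b,FAILED\n";
Map<String, DependResult> parsed = processInstanceService.parseLogForDependentResult(sampleLog);
// parsed -> {dep_task_a=SUCCESS, dep_task_b=FAILED}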

5
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java

@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc;
import org.apache.dolphinscheduler.api.enums.Status;
- import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
@ -48,10 +47,10 @@ import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
- * project service implement
+ * project service impl
**/
@Service
- public class ProjectServiceImpl extends BaseService implements ProjectService {
+ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectService {
@Autowired
private ProjectMapper projectMapper;

294
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java

@ -0,0 +1,294 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.QueueService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Queue;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.QueueMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* queue service impl
*/
@Service
public class QueueServiceImpl extends BaseServiceImpl implements QueueService {
private static final Logger logger = LoggerFactory.getLogger(QueueServiceImpl.class);
@Autowired
private QueueMapper queueMapper;
@Autowired
private UserMapper userMapper;
/**
* query queue list
*
* @param loginUser login user
* @return queue list
*/
public Map<String, Object> queryList(User loginUser) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
List<Queue> queueList = queueMapper.selectList(null);
result.put(Constants.DATA_LIST, queueList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query queue list paging
*
* @param loginUser login user
* @param pageNo page number
* @param searchVal search value
* @param pageSize page size
* @return queue list
*/
public Map<String, Object> queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
Page<Queue> page = new Page<>(pageNo, pageSize);
IPage<Queue> queueList = queueMapper.queryQueuePaging(page, searchVal);
Integer count = (int) queueList.getTotal();
PageInfo<Queue> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount(count);
pageInfo.setLists(queueList.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* create queue
*
* @param loginUser login user
* @param queue queue
* @param queueName queue name
* @return create result
*/
public Map<String, Object> createQueue(User loginUser, String queue, String queueName) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
if (StringUtils.isEmpty(queue)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE);
return result;
}
if (StringUtils.isEmpty(queueName)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE_NAME);
return result;
}
if (checkQueueNameExist(queueName)) {
putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
return result;
}
if (checkQueueExist(queue)) {
putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
return result;
}
Queue queueObj = new Queue();
Date now = new Date();
queueObj.setQueue(queue);
queueObj.setQueueName(queueName);
queueObj.setCreateTime(now);
queueObj.setUpdateTime(now);
queueMapper.insert(queueObj);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update queue
*
* @param loginUser login user
* @param queue queue
* @param id queue id
* @param queueName queue name
* @return update result code
*/
public Map<String, Object> updateQueue(User loginUser, int id, String queue, String queueName) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
if (StringUtils.isEmpty(queue)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE);
return result;
}
if (StringUtils.isEmpty(queueName)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE_NAME);
return result;
}
Queue queueObj = queueMapper.selectById(id);
if (queueObj == null) {
putMsg(result, Status.QUEUE_NOT_EXIST, id);
return result;
}
// whether queue value or queueName is changed
if (queue.equals(queueObj.getQueue()) && queueName.equals(queueObj.getQueueName())) {
putMsg(result, Status.NEED_NOT_UPDATE_QUEUE);
return result;
}
// check whether the new queue name already exists
if (!queueName.equals(queueObj.getQueueName())
&& checkQueueNameExist(queueName)) {
putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
return result;
}
// check whether the new queue value already exists
if (!queue.equals(queueObj.getQueue()) && checkQueueExist(queue)) {
putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
return result;
}
// check whether the old queue is still used by any user
if (checkIfQueueIsInUsing(queueObj.getQueueName(), queueName)) {
// migrate users related to the old queue to the new queue
Integer relatedUserNums = userMapper.updateUserQueue(queueObj.getQueueName(), queueName);
logger.info("the old queue is related to {} user(s), updated them to the new queue successfully.", relatedUserNums);
}
// update queue
Date now = new Date();
queueObj.setQueue(queue);
queueObj.setQueueName(queueName);
queueObj.setUpdateTime(now);
queueMapper.updateById(queueObj);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify queue and queueName
*
* @param queue queue
* @param queueName queue name
* @return verify result, SUCCESS only when neither the queue value nor the queue name exists yet
*/
public Result<Object> verifyQueue(String queue, String queueName) {
Result<Object> result = new Result<>();
if (StringUtils.isEmpty(queue)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE);
return result;
}
if (StringUtils.isEmpty(queueName)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE_NAME);
return result;
}
if (checkQueueNameExist(queueName)) {
putMsg(result, Status.QUEUE_NAME_EXIST, queueName);
return result;
}
if (checkQueueExist(queue)) {
putMsg(result, Status.QUEUE_VALUE_EXIST, queue);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check whether the queue value already exists
*
* @param queue queue value
* @return true if the queue value exists, otherwise false
*/
private boolean checkQueueExist(String queue) {
return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(queue, null));
}
/**
* check whether the queue name already exists
*
* @param queueName queue name
* @return true if the queue name exists, otherwise false
*/
private boolean checkQueueNameExist(String queueName) {
return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(null, queueName));
}
/**
* check whether the old queue name is used by any user,
* which means the related users need to be updated
*
* @param oldQueue old queue name
* @param newQueue new queue name
* @return true if need to update user
*/
private boolean checkIfQueueIsInUsing (String oldQueue, String newQueue) {
return !oldQueue.equals(newQueue) && CollectionUtils.isNotEmpty(userMapper.queryUserListByQueue(oldQueue));
}
}
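A short, hypothetical usage sketch of the queue checks above; the service reference, the admin user, the queue id and the sample values are illustrative and not part of the diff.
// Hypothetical caller; queueService is an injected QueueServiceImpl, adminUser an admin account.
queueService.createQueue(adminUser, "root.default", "default_queue");
Result<Object> verify = queueService.verifyQueue("root.default", "default_queue");
// verifyQueue now reports QUEUE_NAME_EXIST / QUEUE_VALUE_EXIST, since createQueue stored both values
Map<String, Object> renamed = queueService.updateQueue(adminUser, 1, "root.default", "default_queue_v2");
// updateQueue also migrates users still pointing at the old queue name (see checkIfQueueIsInUsing)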

1312
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java

File diff suppressed because it is too large.

599
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java

@ -0,0 +1,599 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.dto.ScheduleParam;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.api.service.MonitorService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob;
import org.apache.dolphinscheduler.service.quartz.QuartzExecutors;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* scheduler service impl
*/
@Service
public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerService {
private static final Logger logger = LoggerFactory.getLogger(SchedulerServiceImpl.class);
@Autowired
private ProjectService projectService;
@Autowired
private ExecutorService executorService;
@Autowired
private MonitorService monitorService;
@Autowired
private ProcessService processService;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
/**
* save schedule
*
* @param loginUser login user
* @param projectName project name
* @param processDefineId process definition id
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param processInstancePriority process instance priority
* @param workerGroup worker group
* @return create result code
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> insertSchedule(User loginUser, String projectName,
Integer processDefineId,
String schedule,
WarningType warningType,
int warningGroupId,
FailureStrategy failureStrategy,
Priority processInstancePriority,
String workerGroup) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
// check work flow define release state
ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId);
result = executorService.checkProcessDefinitionValid(processDefinition, processDefineId);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
Schedule scheduleObj = new Schedule();
Date now = new Date();
scheduleObj.setProjectName(projectName);
scheduleObj.setProcessDefinitionId(processDefinition.getId());
scheduleObj.setProcessDefinitionName(processDefinition.getName());
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
logger.warn("The start time must not be the same as the end");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return result;
}
scheduleObj.setStartTime(scheduleParam.getStartTime());
scheduleObj.setEndTime(scheduleParam.getEndTime());
if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) {
logger.error("{} verify failure", scheduleParam.getCrontab());
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab());
return result;
}
scheduleObj.setCrontab(scheduleParam.getCrontab());
scheduleObj.setWarningType(warningType);
scheduleObj.setWarningGroupId(warningGroupId);
scheduleObj.setFailureStrategy(failureStrategy);
scheduleObj.setCreateTime(now);
scheduleObj.setUpdateTime(now);
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setUserName(loginUser.getUserName());
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
scheduleObj.setProcessInstancePriority(processInstancePriority);
scheduleObj.setWorkerGroup(workerGroup);
scheduleMapper.insert(scheduleObj);
/**
* updateProcessInstance receivers and cc by process definition id
*/
processDefinition.setWarningGroupId(warningGroupId);
processDefinitionMapper.updateById(processDefinition);
// return scheduler object with ID
result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId()));
putMsg(result, Status.SUCCESS);
result.put("scheduleId", scheduleObj.getId());
return result;
}
/**
* updateProcessInstance schedule
*
* @param loginUser login user
* @param projectName project name
* @param id scheduler id
* @param scheduleExpression scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param workerGroup worker group
* @param processInstancePriority process instance priority
* @param scheduleStatus schedule status
* @return update result code
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> updateSchedule(User loginUser,
String projectName,
Integer id,
String scheduleExpression,
WarningType warningType,
int warningGroupId,
FailureStrategy failureStrategy,
ReleaseState scheduleStatus,
Priority processInstancePriority,
String workerGroup) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
// check schedule exists
Schedule schedule = scheduleMapper.selectById(id);
if (schedule == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
return result;
}
ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId());
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId());
return result;
}
/**
* scheduling on-line status forbid modification
*/
if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) {
return result;
}
Date now = new Date();
// updateProcessInstance param
if (StringUtils.isNotEmpty(scheduleExpression)) {
ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class);
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
logger.warn("The start time must not be the same as the end");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return result;
}
schedule.setStartTime(scheduleParam.getStartTime());
schedule.setEndTime(scheduleParam.getEndTime());
if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) {
putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab());
return result;
}
schedule.setCrontab(scheduleParam.getCrontab());
}
if (warningType != null) {
schedule.setWarningType(warningType);
}
schedule.setWarningGroupId(warningGroupId);
if (failureStrategy != null) {
schedule.setFailureStrategy(failureStrategy);
}
if (scheduleStatus != null) {
schedule.setReleaseState(scheduleStatus);
}
schedule.setWorkerGroup(workerGroup);
schedule.setUpdateTime(now);
schedule.setProcessInstancePriority(processInstancePriority);
scheduleMapper.updateById(schedule);
/**
* updateProcessInstance recipients and cc by process definition ID
*/
processDefinition.setWarningGroupId(warningGroupId);
processDefinitionMapper.updateById(processDefinition);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* set schedule online or offline
*
* @param loginUser login user
* @param projectName project name
* @param id scheduler id
* @param scheduleStatus schedule status
* @return publish result code
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> setScheduleState(User loginUser,
String projectName,
Integer id,
ReleaseState scheduleStatus) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
// check schedule exists
Schedule scheduleObj = scheduleMapper.selectById(id);
if (scheduleObj == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
return result;
}
// check schedule release state
if (scheduleObj.getReleaseState() == scheduleStatus) {
logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}",
scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus);
putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus);
return result;
}
ProcessDefinition processDefinition = processService.findProcessDefineById(scheduleObj.getProcessDefinitionId());
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId());
return result;
}
if (scheduleStatus == ReleaseState.ONLINE) {
// check process definition release state
if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
logger.info("not release process definition id: {} , name : {}",
processDefinition.getId(), processDefinition.getName());
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName());
return result;
}
// check sub process definition release state
List<Integer> subProcessDefineIds = new ArrayList<>();
processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds);
Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]);
if (!subProcessDefineIds.isEmpty()) {
List<ProcessDefinition> subProcessDefinitionList =
processDefinitionMapper.queryDefinitionListByIdList(idArray);
if (subProcessDefinitionList != null && !subProcessDefinitionList.isEmpty()) {
for (ProcessDefinition subProcessDefinition : subProcessDefinitionList) {
/**
* if there is no online process, exit directly
*/
if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) {
logger.info("not release process definition id: {} , name : {}",
subProcessDefinition.getId(), subProcessDefinition.getName());
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId());
return result;
}
}
}
}
}
// check master server exists
List<Server> masterServers = monitorService.getServerListFromZK(true);
if (masterServers.isEmpty()) {
putMsg(result, Status.MASTER_NOT_EXISTS);
return result;
}
// set status
scheduleObj.setReleaseState(scheduleStatus);
scheduleMapper.updateById(scheduleObj);
try {
switch (scheduleStatus) {
case ONLINE:
logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers);
setSchedule(project.getId(), scheduleObj);
break;
case OFFLINE:
logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers);
deleteSchedule(project.getId(), id);
break;
default:
putMsg(result, Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString());
return result;
}
} catch (Exception e) {
result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? "set online failure" : "set offline failure");
throw new ServiceException(result.get(Constants.MSG).toString());
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query schedule
*
* @param loginUser login user
* @param projectName project name
* @param processDefineId process definition id
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
* @return schedule list page
*/
public Map<String, Object> querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize) {
HashMap<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId);
return result;
}
Page<Schedule> page = new Page<>(pageNo, pageSize);
IPage<Schedule> scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging(
page, processDefineId, searchVal
);
PageInfo<Schedule> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int) scheduleIPage.getTotal());
pageInfo.setLists(scheduleIPage.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query schedule list
*
* @param loginUser login user
* @param projectName project name
* @return schedule list
*/
public Map<String, Object> queryScheduleList(User loginUser, String projectName) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
List<Schedule> schedules = scheduleMapper.querySchedulerListByProjectName(projectName);
result.put(Constants.DATA_LIST, schedules);
putMsg(result, Status.SUCCESS);
return result;
}
public void setSchedule(int projectId, Schedule schedule) {
int scheduleId = schedule.getId();
logger.info("set schedule, project id: {}, scheduleId: {}", projectId, scheduleId);
Date startDate = schedule.getStartTime();
Date endDate = schedule.getEndTime();
String jobName = QuartzExecutors.buildJobName(scheduleId);
String jobGroupName = QuartzExecutors.buildJobGroupName(projectId);
Map<String, Object> dataMap = QuartzExecutors.buildDataMap(projectId, scheduleId, schedule);
QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, jobName, jobGroupName, startDate, endDate,
schedule.getCrontab(), dataMap);
}
/**
* delete schedule
*
* @param projectId project id
* @param scheduleId schedule id
* @throws RuntimeException runtime exception
*/
public void deleteSchedule(int projectId, int scheduleId) {
logger.info("delete schedules of project id:{}, schedule id:{}", projectId, scheduleId);
String jobName = QuartzExecutors.buildJobName(scheduleId);
String jobGroupName = QuartzExecutors.buildJobGroupName(projectId);
if (!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)) {
logger.warn("set offline failure:projectId:{},scheduleId:{}", projectId, scheduleId);
throw new ServiceException("set offline failure");
}
}
/**
* check valid
*
* @param result result
* @param bool bool
* @param status status
* @return check result code
*/
private boolean checkValid(Map<String, Object> result, boolean bool, Status status) {
// when the condition holds, put the given status and signal the caller to return
if (bool) {
putMsg(result, status);
return true;
}
return false;
}
/**
* delete schedule by id
*
* @param loginUser login user
* @param projectName project name
* @param scheduleId schedule id
* @return delete result code
*/
public Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
Schedule schedule = scheduleMapper.selectById(scheduleId);
if (schedule == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId);
return result;
}
// Determine if the login user is the owner of the schedule
if (loginUser.getId() != schedule.getUserId()
&& loginUser.getUserType() != UserType.ADMIN_USER) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
// check schedule is already online
if (schedule.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId());
return result;
}
int delete = scheduleMapper.deleteById(scheduleId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
}
return result;
}
/**
* preview schedule
*
* @param loginUser login user
* @param projectName project name
* @param schedule schedule expression
* @return the next five fire time
*/
public Map<String, Object> previewSchedule(User loginUser, String projectName, String schedule) {
Map<String, Object> result = new HashMap<>();
CronExpression cronExpression;
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
Date now = new Date();
Date startTime = now.after(scheduleParam.getStartTime()) ? now : scheduleParam.getStartTime();
Date endTime = scheduleParam.getEndTime();
try {
cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab());
} catch (ParseException e) {
logger.error(e.getMessage(), e);
putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
return result;
}
List<Date> selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime, cronExpression, Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT);
result.put(Constants.DATA_LIST, selfFireDateList.stream().map(DateUtils::dateToString));
putMsg(result, Status.SUCCESS);
return result;
}
}
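For orientation, a hypothetical previewSchedule call; the JSON field names mirror the ScheduleParam getters used above, while the date format, project name and service wiring are assumptions.
// Hypothetical preview of the next fire times for a daily 01:00 crontab.
String scheduleJson = "{\"startTime\":\"2021-01-01 00:00:00\","
+ "\"endTime\":\"2021-12-31 00:00:00\","
+ "\"crontab\":\"0 0 1 * * ? *\"}";
Map<String, Object> preview = schedulerService.previewSchedule(loginUser, "demo_project", scheduleJson);
// Constants.DATA_LIST then holds up to PREVIEW_SCHEDULE_EXECUTE_COUNT upcoming fire times as strings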

20
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java

@ -17,22 +17,22 @@
package org.apache.dolphinscheduler.api.service.impl;
- import java.util.Date;
- import java.util.List;
- import java.util.UUID;
- import javax.servlet.http.Cookie;
- import javax.servlet.http.HttpServletRequest;
- import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.api.controller.BaseController;
- import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.SessionService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
+ import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Session;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.SessionMapper;
+ import java.util.Date;
+ import java.util.List;
+ import java.util.UUID;
+ import javax.servlet.http.Cookie;
+ import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -44,7 +44,7 @@ import org.springframework.web.util.WebUtils;
* session service implement
*/
@Service
- public class SessionServiceImpl extends BaseService implements SessionService {
+ public class SessionServiceImpl extends BaseServiceImpl implements SessionService {
private static final Logger logger = LoggerFactory.getLogger(SessionService.class);

207
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java

@ -0,0 +1,207 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.TaskInstanceService;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.MessageFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* task instance service impl
*/
@Service
public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInstanceService {
@Autowired
ProjectMapper projectMapper;
@Autowired
ProjectService projectService;
@Autowired
ProcessService processService;
@Autowired
TaskInstanceMapper taskInstanceMapper;
@Autowired
ProcessInstanceService processInstanceService;
@Autowired
UsersService usersService;
/**
* query task list by project, process instance, task name, task start time, task end time, task status, keyword paging
*
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @param processInstanceName process instance name
* @param searchVal search value
* @param taskName task name
* @param executorName executor name
* @param stateType state type
* @param host host
* @param startDate start time
* @param endDate end time
* @param pageNo page number
* @param pageSize page size
* @return task list page
*/
public Map<String, Object> queryTaskListPaging(User loginUser, String projectName,
Integer processInstanceId, String processInstanceName, String taskName, String executorName, String startDate,
String endDate, String searchVal, ExecutionStatus stateType, String host,
Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
int[] statusArray = null;
if (stateType != null) {
statusArray = new int[]{stateType.ordinal()};
}
Date start = null;
Date end = null;
if (StringUtils.isNotEmpty(startDate)) {
start = DateUtils.getScheduleDate(startDate);
if (start == null) {
return generateInvalidParamRes(result, "startDate");
}
}
if (StringUtils.isNotEmpty(endDate)) {
end = DateUtils.getScheduleDate(endDate);
if (end == null) {
return generateInvalidParamRes(result, "endDate");
}
}
Page<TaskInstance> page = new Page<>(pageNo, pageSize);
PageInfo<Map<String, Object>> pageInfo = new PageInfo<>(pageNo, pageSize);
int executorId = usersService.getUserIdByName(executorName);
IPage<TaskInstance> taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging(
page, project.getId(), processInstanceId, processInstanceName, searchVal, taskName, executorId, statusArray, host, start, end
);
Set<String> exclusionSet = new HashSet<>();
exclusionSet.add(Constants.CLASS);
exclusionSet.add("taskJson");
List<TaskInstance> taskInstanceList = taskInstanceIPage.getRecords();
for (TaskInstance taskInstance : taskInstanceList) {
taskInstance.setDuration(DateUtils.format2Duration(taskInstance.getStartTime(), taskInstance.getEndTime()));
User executor = usersService.queryUser(taskInstance.getExecutorId());
if (null != executor) {
taskInstance.setExecutorName(executor.getUserName());
}
}
pageInfo.setTotalCount((int) taskInstanceIPage.getTotal());
pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(), exclusionSet));
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* change one task instance's state from failure to forced success
*
* @param loginUser login user
* @param projectName project name
* @param taskInstanceId task instance id
* @return the result code and msg
*/
public Map<String, Object> forceTaskSuccess(User loginUser, String projectName, Integer taskInstanceId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check user auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
// check whether the task instance can be found
TaskInstance task = taskInstanceMapper.selectById(taskInstanceId);
if (task == null) {
putMsg(result, Status.TASK_INSTANCE_NOT_FOUND);
return result;
}
// check whether the task instance state type is failure
if (!task.getState().typeIsFailure()) {
putMsg(result, Status.TASK_INSTANCE_STATE_OPERATION_ERROR, taskInstanceId, task.getState().toString());
return result;
}
// change the state of the task instance
task.setState(ExecutionStatus.FORCED_SUCCESS);
int changedNum = taskInstanceMapper.updateById(task);
if (changedNum > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.FORCE_TASK_SUCCESS_ERROR);
}
return result;
}
/***
* generate {@link org.apache.dolphinscheduler.api.enums.Status#REQUEST_PARAMS_NOT_VALID_ERROR} res with param name
* @param result exist result map
* @param params invalid params name
* @return update result map
*/
private Map<String, Object> generateInvalidParamRes(Map<String, Object> result, String params) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), params));
return result;
}
}
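A hypothetical call against forceTaskSuccess above, summarising the outcomes the method distinguishes; the task id, project name and service reference are illustrative.
// Hypothetical caller; taskInstanceService is an injected TaskInstanceServiceImpl.
Map<String, Object> res = taskInstanceService.forceTaskSuccess(loginUser, "demo_project", 42);
// Status.SUCCESS                             -> the failed task was set to FORCED_SUCCESS
// Status.TASK_INSTANCE_STATE_OPERATION_ERROR -> the task was not in a failure state
// Status.TASK_INSTANCE_NOT_FOUND             -> no task instance with that id exists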

85
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskRecordServiceImpl.java

@ -0,0 +1,85 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.TASK_RECORD_TABLE_HISTORY_HIVE_LOG;
import static org.apache.dolphinscheduler.common.Constants.TASK_RECORD_TABLE_HIVE_LOG;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.TaskRecordService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.TaskRecordDao;
import org.apache.dolphinscheduler.dao.entity.TaskRecord;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.stereotype.Service;
/**
* task record service impl
*/
@Service
public class TaskRecordServiceImpl extends BaseServiceImpl implements TaskRecordService {
/**
* query task record list paging
*
* @param taskName task name
* @param state state
* @param sourceTable source table
* @param destTable destination table
* @param taskDate task date
* @param startDate start time
* @param endDate end time
* @param pageNo page number
* @param pageSize page size
* @param isHistory is history
* @return task record list
*/
public Map<String,Object> queryTaskRecordListPaging(boolean isHistory, String taskName, String startDate,
String taskDate, String sourceTable,
String destTable, String endDate,
String state, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
PageInfo<TaskRecord> pageInfo = new PageInfo<>(pageNo, pageSize);
Map<String, String> map = new HashMap<>();
map.put("taskName", taskName);
map.put("taskDate", taskDate);
map.put("state", state);
map.put("sourceTable", sourceTable);
map.put("targetTable", destTable);
map.put("startTime", startDate);
map.put("endTime", endDate);
map.put("offset", pageInfo.getStart().toString());
map.put("pageSize", pageInfo.getPageSize().toString());
String table = isHistory ? TASK_RECORD_TABLE_HISTORY_HIVE_LOG : TASK_RECORD_TABLE_HIVE_LOG;
int count = TaskRecordDao.countTaskRecord(map, table);
List<TaskRecord> recordList = TaskRecordDao.queryAllTaskRecord(map, table);
pageInfo.setTotalCount(count);
pageInfo.setLists(recordList);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
}

21
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java

@ -18,7 +18,6 @@
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
- import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.TenantService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.RegexUtils;
@ -42,8 +41,6 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@ -52,12 +49,10 @@ import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/** /**
* tenant service * tenant service impl
*/ */
@Service @Service
public class TenantServiceImpl extends BaseService implements TenantService { public class TenantServiceImpl extends BaseServiceImpl implements TenantService {
private static final Logger logger = LoggerFactory.getLogger(TenantServiceImpl.class);
@Autowired @Autowired
private TenantMapper tenantMapper; private TenantMapper tenantMapper;
@ -87,7 +82,7 @@ public class TenantServiceImpl extends BaseService implements TenantService {
int queueId, int queueId,
String desc) throws Exception { String desc) throws Exception {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false); result.put(Constants.STATUS, false);
if (isNotAdmin(loginUser, result)) { if (isNotAdmin(loginUser, result)) {
return result; return result;
@ -140,7 +135,7 @@ public class TenantServiceImpl extends BaseService implements TenantService {
*/ */
public Map<String, Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { public Map<String, Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) { if (isNotAdmin(loginUser, result)) {
return result; return result;
} }
@ -171,7 +166,7 @@ public class TenantServiceImpl extends BaseService implements TenantService {
public Map<String, Object> updateTenant(User loginUser, int id, String tenantCode, int queueId, public Map<String, Object> updateTenant(User loginUser, int id, String tenantCode, int queueId,
String desc) throws Exception { String desc) throws Exception {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false); result.put(Constants.STATUS, false);
if (isNotAdmin(loginUser, result)) { if (isNotAdmin(loginUser, result)) {
@ -233,7 +228,7 @@ public class TenantServiceImpl extends BaseService implements TenantService {
*/ */
@Transactional(rollbackFor = Exception.class) @Transactional(rollbackFor = Exception.class)
public Map<String, Object> deleteTenantById(User loginUser, int id) throws Exception { public Map<String, Object> deleteTenantById(User loginUser, int id) throws Exception {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) { if (isNotAdmin(loginUser, result)) {
return result; return result;
@ -291,7 +286,7 @@ public class TenantServiceImpl extends BaseService implements TenantService {
*/ */
public Map<String, Object> queryTenantList(String tenantCode) { public Map<String, Object> queryTenantList(String tenantCode) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
List<Tenant> resourceList = tenantMapper.queryByTenantCode(tenantCode); List<Tenant> resourceList = tenantMapper.queryByTenantCode(tenantCode);
if (CollectionUtils.isNotEmpty(resourceList)) { if (CollectionUtils.isNotEmpty(resourceList)) {
@ -311,7 +306,7 @@ public class TenantServiceImpl extends BaseService implements TenantService {
*/ */
public Map<String, Object> queryTenantList(User loginUser) { public Map<String, Object> queryTenantList(User loginUser) {
Map<String, Object> result = new HashMap<>(5); Map<String, Object> result = new HashMap<>();
List<Tenant> resourceList = tenantMapper.selectList(null); List<Tenant> resourceList = tenantMapper.selectList(null);
result.put(Constants.DATA_LIST, resourceList); result.put(Constants.DATA_LIST, resourceList);

325
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java

@@ -0,0 +1,325 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.UdfFuncService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* udf func service impl
*/
@Service
public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncService {
private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceImpl.class);
@Autowired
private ResourceMapper resourceMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private UDFUserMapper udfUserMapper;
/**
* create udf function
*
* @param loginUser login user
* @param type udf type
* @param funcName function name
* @param argTypes argument types
* @param database database
* @param desc description
* @param resourceId resource id
* @param className class name
* @return create result code
*/
public Result<Object> createUdfFunction(User loginUser,
String funcName,
String className,
String argTypes,
String database,
String desc,
UdfType type,
int resourceId) {
Result<Object> result = new Result<>();
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()) {
logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
putMsg(result, Status.HDFS_NOT_STARTUP);
return result;
}
// verify udf func name exist
if (checkUdfFuncNameExists(funcName)) {
putMsg(result, Status.UDF_FUNCTION_EXISTS);
return result;
}
Resource resource = resourceMapper.selectById(resourceId);
if (resource == null) {
logger.error("resourceId {} is not exist", resourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
//save data
UdfFunc udf = new UdfFunc();
Date now = new Date();
udf.setUserId(loginUser.getId());
udf.setFuncName(funcName);
udf.setClassName(className);
if (StringUtils.isNotEmpty(argTypes)) {
udf.setArgTypes(argTypes);
}
if (StringUtils.isNotEmpty(database)) {
udf.setDatabase(database);
}
udf.setDescription(desc);
udf.setResourceId(resourceId);
udf.setResourceName(resource.getFullName());
udf.setType(type);
udf.setCreateTime(now);
udf.setUpdateTime(now);
udfFuncMapper.insert(udf);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check whether the udf function name already exists
*
* @param name name
* @return true if the name already exists
*/
private boolean checkUdfFuncNameExists(String name) {
List<UdfFunc> resource = udfFuncMapper.queryUdfByIdStr(null, name);
return resource != null && !resource.isEmpty();
}
/**
* query udf function
*
* @param id udf function id
* @return udf function detail
*/
public Map<String, Object> queryUdfFuncDetail(int id) {
Map<String, Object> result = new HashMap<>();
UdfFunc udfFunc = udfFuncMapper.selectById(id);
if (udfFunc == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
result.put(Constants.DATA_LIST, udfFunc);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update udf function
*
* @param udfFuncId udf function id
* @param type udf type
* @param funcName function name
* @param argTypes argument types
* @param database database
* @param desc description
* @param resourceId resource id
* @param className class name
* @return update result code
*/
public Map<String, Object> updateUdfFunc(int udfFuncId,
String funcName,
String className,
String argTypes,
String database,
String desc,
UdfType type,
int resourceId) {
Map<String, Object> result = new HashMap<>();
// verify udfFunc is exist
UdfFunc udf = udfFuncMapper.selectUdfById(udfFuncId);
if (udf == null) {
result.put(Constants.STATUS, Status.UDF_FUNCTION_NOT_EXIST);
result.put(Constants.MSG, Status.UDF_FUNCTION_NOT_EXIST.getMsg());
return result;
}
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()) {
logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
putMsg(result, Status.HDFS_NOT_STARTUP);
return result;
}
// verify udfFuncName is exist
if (!funcName.equals(udf.getFuncName())) {
if (checkUdfFuncNameExists(funcName)) {
logger.error("UdfFunc {} has exist, can't create again.", funcName);
result.put(Constants.STATUS, Status.UDF_FUNCTION_EXISTS);
result.put(Constants.MSG, Status.UDF_FUNCTION_EXISTS.getMsg());
return result;
}
}
Resource resource = resourceMapper.selectById(resourceId);
if (resource == null) {
logger.error("resourceId {} is not exist", resourceId);
result.put(Constants.STATUS, Status.RESOURCE_NOT_EXIST);
result.put(Constants.MSG, Status.RESOURCE_NOT_EXIST.getMsg());
return result;
}
Date now = new Date();
udf.setFuncName(funcName);
udf.setClassName(className);
udf.setArgTypes(argTypes);
if (StringUtils.isNotEmpty(database)) {
udf.setDatabase(database);
}
udf.setDescription(desc);
udf.setResourceId(resourceId);
udf.setResourceName(resource.getFullName());
udf.setType(type);
udf.setUpdateTime(now);
udfFuncMapper.updateById(udf);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query udf function list paging
*
* @param loginUser login user
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
* @return udf function list page
*/
public Map<String, Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
PageInfo<UdfFunc> pageInfo = new PageInfo<>(pageNo, pageSize);
IPage<UdfFunc> udfFuncList = getUdfFuncsPage(loginUser, searchVal, pageSize, pageNo);
pageInfo.setTotalCount((int)udfFuncList.getTotal());
pageInfo.setLists(udfFuncList.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get udf functions
*
* @param loginUser login user
* @param searchVal search value
* @param pageSize page size
* @param pageNo page number
* @return udf function list page
*/
private IPage<UdfFunc> getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) {
int userId = loginUser.getId();
if (isAdmin(loginUser)) {
userId = 0;
}
Page<UdfFunc> page = new Page<>(pageNo, pageSize);
return udfFuncMapper.queryUdfFuncPaging(page, userId, searchVal);
}
/**
* query udf list
*
* @param loginUser login user
* @param type udf type
* @return udf func list
*/
public Map<String, Object> queryUdfFuncList(User loginUser, Integer type) {
Map<String, Object> result = new HashMap<>();
int userId = loginUser.getId();
if (isAdmin(loginUser)) {
userId = 0;
}
List<UdfFunc> udfFuncList = udfFuncMapper.getUdfFuncByType(userId, type);
result.put(Constants.DATA_LIST, udfFuncList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete udf function
*
* @param id udf function id
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
public Result<Object> delete(int id) {
Result<Object> result = new Result<>();
udfFuncMapper.deleteById(id);
udfUserMapper.deleteByUdfFuncId(id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify udf function by name
*
* @param name name
* @return true if the name can be used, otherwise false
*/
public Result<Object> verifyUdfFuncByName(String name) {
Result<Object> result = new Result<>();
if (checkUdfFuncNameExists(name)) {
putMsg(result, Status.UDF_FUNCTION_EXISTS);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
}

11
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java

@@ -18,7 +18,6 @@
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
-import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.UiPluginService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.PluginType;
@@ -34,20 +33,14 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
-* UiPluginServiceImpl
+* ui plugin service impl
*/
@Service
-public class UiPluginServiceImpl extends BaseService implements UiPluginService {
+public class UiPluginServiceImpl extends BaseServiceImpl implements UiPluginService {
@Autowired
PluginDefineMapper pluginDefineMapper;
-private static final String LANGUAGE_REGEX = "\"([^\"]*)\"";
-private static final String LANGUAGE_SYMBOL = "$t";
-private static final String ESCAPE_SYMBOL = "\\";
@Override
public Map<String, Object> queryUiPluginsByType(PluginType pluginType) {
Map<String, Object> result = new HashMap<>();

1070
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java

File diff suppressed because it is too large

108
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java

@@ -0,0 +1,108 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.WorkFlowLineageService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;
import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation;
import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* work flow lineage service impl
*/
@Service
public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkFlowLineageService {
@Autowired
private WorkFlowLineageMapper workFlowLineageMapper;
public Map<String, Object> queryWorkFlowLineageByName(String workFlowName, int projectId) {
Map<String, Object> result = new HashMap<>();
List<WorkFlowLineage> workFlowLineageList = workFlowLineageMapper.queryByName(workFlowName, projectId);
result.put(Constants.DATA_LIST, workFlowLineageList);
putMsg(result, Status.SUCCESS);
return result;
}
private void getWorkFlowRelationRecursion(Set<Integer> ids, List<WorkFlowRelation> workFlowRelations, Set<Integer> sourceIds) {
for (int id : ids) {
sourceIds.addAll(ids);
List<WorkFlowRelation> workFlowRelationsTmp = workFlowLineageMapper.querySourceTarget(id);
if (workFlowRelationsTmp != null && !workFlowRelationsTmp.isEmpty()) {
Set<Integer> idsTmp = new HashSet<>();
for (WorkFlowRelation workFlowRelation:workFlowRelationsTmp) {
if (!sourceIds.contains(workFlowRelation.getTargetWorkFlowId())) {
idsTmp.add(workFlowRelation.getTargetWorkFlowId());
}
}
workFlowRelations.addAll(workFlowRelationsTmp);
getWorkFlowRelationRecursion(idsTmp, workFlowRelations,sourceIds);
}
}
}
public Map<String, Object> queryWorkFlowLineageByIds(Set<Integer> ids,int projectId) {
Map<String, Object> result = new HashMap<>();
List<WorkFlowLineage> workFlowLineageList = workFlowLineageMapper.queryByIds(ids, projectId);
Map<String, Object> workFlowLists = new HashMap<>();
Set<Integer> idsV = new HashSet<>();
if (ids == null || ids.isEmpty()) {
for (WorkFlowLineage workFlowLineage:workFlowLineageList) {
idsV.add(workFlowLineage.getWorkFlowId());
}
} else {
idsV = ids;
}
List<WorkFlowRelation> workFlowRelations = new ArrayList<>();
Set<Integer> sourceIds = new HashSet<>();
getWorkFlowRelationRecursion(idsV, workFlowRelations, sourceIds);
Set<Integer> idSet = new HashSet<>();
//If the incoming parameter is not empty, you need to add downstream workflow detail attributes
if (ids != null && !ids.isEmpty()) {
for (WorkFlowRelation workFlowRelation : workFlowRelations) {
idSet.add(workFlowRelation.getTargetWorkFlowId());
}
for (int id : ids) {
idSet.remove(id);
}
if (!idSet.isEmpty()) {
workFlowLineageList.addAll(workFlowLineageMapper.queryByIds(idSet, projectId));
}
}
workFlowLists.put(Constants.WORKFLOW_LIST, workFlowLineageList);
workFlowLists.put(Constants.WORKFLOW_RELATION_LIST, workFlowRelations);
result.put(Constants.DATA_LIST, workFlowLists);
putMsg(result, Status.SUCCESS);
return result;
}
}

179
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java

@@ -0,0 +1,179 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.WorkerGroupService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* worker group service impl
*/
@Service
public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGroupService {
private static final String NO_NODE_EXCEPTION_REGEX = "KeeperException$NoNodeException";
@Autowired
protected ZookeeperCachedOperator zookeeperCachedOperator;
@Autowired
ProcessInstanceMapper processInstanceMapper;
/**
* query worker group paging
*
* @param loginUser login user
* @param pageNo page number
* @param searchVal search value
* @param pageSize page size
* @return worker group list page
*/
public Map<String, Object> queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal) {
// list from index
int fromIndex = (pageNo - 1) * pageSize;
// list to index
int toIndex = (pageNo - 1) * pageSize + pageSize;
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
List<WorkerGroup> workerGroups = getWorkerGroups(true);
List<WorkerGroup> resultDataList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(workerGroups)) {
List<WorkerGroup> searchValDataList = new ArrayList<>();
if (StringUtils.isNotEmpty(searchVal)) {
for (WorkerGroup workerGroup : workerGroups) {
if (workerGroup.getName().contains(searchVal)) {
searchValDataList.add(workerGroup);
}
}
} else {
searchValDataList = workerGroups;
}
if (searchValDataList.size() < pageSize) {
toIndex = (pageNo - 1) * pageSize + searchValDataList.size();
}
resultDataList = searchValDataList.subList(fromIndex, toIndex);
}
PageInfo<WorkerGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount(resultDataList.size());
pageInfo.setLists(resultDataList);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query all worker group
*
* @return all worker group list
*/
public Map<String, Object> queryAllGroup() {
Map<String, Object> result = new HashMap<>();
List<WorkerGroup> workerGroups = getWorkerGroups(false);
Set<String> availableWorkerGroupSet = workerGroups.stream()
.map(WorkerGroup::getName)
.collect(Collectors.toSet());
result.put(Constants.DATA_LIST, availableWorkerGroupSet);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get worker groups
*
* @param isPaging whether paging
* @return WorkerGroup list
*/
private List<WorkerGroup> getWorkerGroups(boolean isPaging) {
String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS;
List<WorkerGroup> workerGroups = new ArrayList<>();
List<String> workerGroupList;
try {
workerGroupList = zookeeperCachedOperator.getChildrenKeys(workerPath);
} catch (Exception e) {
if (e.getMessage().contains(NO_NODE_EXCEPTION_REGEX)) {
if (!isPaging) {
//ignore noNodeException return Default
WorkerGroup wg = new WorkerGroup();
wg.setName(DEFAULT_WORKER_GROUP);
workerGroups.add(wg);
}
return workerGroups;
} else {
throw e;
}
}
for (String workerGroup : workerGroupList) {
String workerGroupPath = String.format("%s/%s", workerPath, workerGroup);
List<String> childrenNodes = zookeeperCachedOperator.getChildrenKeys(workerGroupPath);
String timeStamp = "";
for (int i = 0; i < childrenNodes.size(); i++) {
String ip = childrenNodes.get(i);
childrenNodes.set(i, ip.substring(0, ip.lastIndexOf(":")));
timeStamp = ip.substring(ip.lastIndexOf(":"));
}
if (CollectionUtils.isNotEmpty(childrenNodes)) {
WorkerGroup wg = new WorkerGroup();
wg.setName(workerGroup);
if (isPaging) {
wg.setIpList(childrenNodes);
String registeredIpValue = zookeeperCachedOperator.get(workerGroupPath + "/" + childrenNodes.get(0) + timeStamp);
wg.setCreateTime(DateUtils.stringToDate(registeredIpValue.split(",")[6]));
wg.setUpdateTime(DateUtils.stringToDate(registeredIpValue.split(",")[7]));
}
workerGroups.add(wg);
}
}
return workerGroups;
}
}
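As a reading aid (not part of the diff): getWorkerGroups assumes each worker child key ends in ":<registration timestamp>" and that the value stored under the node is a comma-separated heartbeat string whose 7th and 8th fields (indices 6 and 7) hold the create and update times. A minimal sketch of that assumed parsing; the sample key and value are invented for illustration and are not defined by this change:
// Assumed formats only; the real key/value layout is produced elsewhere by the worker registry code.
String childKey = "192.168.1.10:1234:1606300000000";              // "ip:port:timestamp"
String ipPort = childKey.substring(0, childKey.lastIndexOf(":")); // "192.168.1.10:1234"
String timeStamp = childKey.substring(childKey.lastIndexOf(":")); // ":1606300000000"
String registeredValue = "0.2,0.3,1.0,2.5,0.1,0,2020-11-25 10:00:00,2020-11-25 10:05:00";
String[] heartbeat = registeredValue.split(",");
Date createTime = DateUtils.stringToDate(heartbeat[6]);           // 7th field: create time
Date updateTime = DateUtils.stringToDate(heartbeat[7]);           // 8th field: update time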

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegexUtils.java

@@ -44,4 +44,13 @@ public class RegexUtils {
Matcher isNum = pattern.matcher(str);
return isNum.matches();
}
+public static String escapeNRT(String str) {
+// Logging should not be vulnerable to injection attacks: Replace pattern-breaking characters
+if (str != null && !str.isEmpty()) {
+return str.replaceAll("[\n|\r|\t]", "_");
+}
+return null;
+}
}
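A short usage sketch (illustrative only, not part of the change) of how a helper like escapeNRT is applied before logging a user-supplied value, so injected newlines cannot forge extra log lines; the userName variable and logger here are assumed:
// Sanitize user-controlled input before it reaches the log.
String safeUserName = RegexUtils.escapeNRT(userName);
logger.info("user {} login", safeUserName);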

18
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java

@@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
@@ -21,10 +22,12 @@ import org.apache.dolphinscheduler.api.service.SessionService;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
-import org.junit.*;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
import org.junit.runner.RunWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@@ -32,12 +35,14 @@ import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
+/**
+* abstract controller test
+*/
@Ignore
@RunWith(SpringRunner.class)
@SpringBootTest(classes = ApiApplicationServer.class)
public class AbstractControllerTest {
-private static Logger logger = LoggerFactory.getLogger(AbstractControllerTest.class);
public static final String SESSION_ID = "sessionId";
protected MockMvc mockMvc;
@@ -49,6 +54,7 @@ public class AbstractControllerTest {
private SessionService sessionService;
protected User user;
protected String sessionId;
@Before
@@ -57,13 +63,11 @@ public class AbstractControllerTest {
createSession();
}
@After
public void after(){
sessionService.signOut("127.0.0.1", user);
}
private void createSession(){
User loginUser = new User();

21
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java

@@ -14,11 +14,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@@ -28,16 +35,12 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+/**
+* access token controller test
+*/
public class AccessTokenControllerTest extends AbstractControllerTest{
private static Logger logger = LoggerFactory.getLogger(AccessTokenControllerTest.class);
@Test
public void testCreateToken() throws Exception {

17
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AlertGroupControllerTest.java

@@ -17,10 +17,16 @@
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.AlertType;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@@ -30,12 +36,11 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+/**
+* alert group controller test
+*/
public class AlertGroupControllerTest extends AbstractControllerTest{
private static final Logger logger = LoggerFactory.getLogger(AlertGroupController.class);
@Test

27
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java

@@ -14,37 +14,32 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
import org.junit.Test;
-import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
-import org.springframework.test.context.junit4.SpringRunner;
-import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import org.springframework.web.context.WebApplicationContext;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+/**
+* data analysis controller test
+*/
public class DataAnalysisControllerTest extends AbstractControllerTest{
private static Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class);
@Test
public void testCountTaskState() throws Exception {

15
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java

@@ -14,11 +14,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
@@ -29,12 +36,6 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* data source controller test
*/

1
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java

@@ -42,6 +42,7 @@ import org.springframework.util.MultiValueMap;
* executor controller test
*/
public class ExecutorControllerTest extends AbstractControllerTest {
private static Logger logger = LoggerFactory.getLogger(ExecutorControllerTest.class);
@Ignore

13
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoggerControllerTest.java

@@ -14,11 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
@@ -28,15 +34,10 @@ import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
/**
* logger controller test
*/
@Ignore
public class LoggerControllerTest extends AbstractControllerTest {

14
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java

@@ -14,11 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@@ -28,16 +34,12 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* login controller test
*/
public class LoginControllerTest extends AbstractControllerTest{
private static Logger logger = LoggerFactory.getLogger(LoginControllerTest.class);
@Test
public void testLogin() throws Exception {

13
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java

@@ -14,11 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@@ -26,10 +32,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* monitor controller test
*/
@@ -37,7 +39,6 @@ public class MonitorControllerTest extends AbstractControllerTest {
private static final Logger logger = LoggerFactory.getLogger(MonitorControllerTest.class);
@Test
public void testListMaster() throws Exception {

1
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java

@@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;

18
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java

@@ -14,11 +14,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
@@ -29,17 +36,12 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
-* project controller
+* project controller test
*/
public class ProjectControllerTest extends AbstractControllerTest{
private static Logger logger = LoggerFactory.getLogger(ProjectControllerTest.class);
@Test
public void testCreateProject() throws Exception {

14
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/QueueControllerTest.java

@@ -14,11 +14,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@@ -28,11 +35,6 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* queue controller test
*/

15
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java

@@ -14,13 +14,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UdfType;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@@ -30,15 +37,11 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* resources controller test
*/
public class ResourcesControllerTest extends AbstractControllerTest{
private static Logger logger = LoggerFactory.getLogger(ResourcesControllerTest.class);
@Test

15
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java

@@ -14,14 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.WarningType;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@@ -31,15 +38,11 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* scheduler controller test
*/
public class SchedulerControllerTest extends AbstractControllerTest{
private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class);
@Test

16
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java

@@ -14,11 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@@ -28,11 +34,11 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+/**
+* task record controller test
+*/
public class TaskRecordControllerTest extends AbstractControllerTest {
private static final Logger logger = LoggerFactory.getLogger(TaskRecordControllerTest.class);
@Test

15      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java

@@ -14,11 +14,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -28,15 +35,11 @@ import org.springframework.test.web.servlet.MvcResult;
 import org.springframework.util.LinkedMultiValueMap;
 import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 /**
  * tenant controller test
  */
 public class TenantControllerTest extends AbstractControllerTest{
     private static Logger logger = LoggerFactory.getLogger(TenantControllerTest.class);
     @Test

19      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java

@@ -14,11 +14,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
+import java.util.ArrayList;
+import java.util.List;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -28,18 +38,11 @@ import org.springframework.test.web.servlet.MvcResult;
 import org.springframework.util.LinkedMultiValueMap;
 import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-import java.util.ArrayList;
-import java.util.List;
 /**
  * users controller test
  */
 public class UsersControllerTest extends AbstractControllerTest{
     private static Logger logger = LoggerFactory.getLogger(UsersControllerTest.class);
     @Test
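The post() builder pulled to the top of UsersControllerTest is typically combined with a LinkedMultiValueMap of form parameters. A minimal sketch of that usage follows, meant to sit in a class like the one after the SchedulerControllerTest hunks (same imports plus the static import of post); the endpoint, parameter names, and values are illustrative, and the 201 Created expectation is an assumption rather than something shown in this diff.

// belongs in a class extending AbstractControllerTest, alongside the imports of the previous sketch
@Test
public void testCreateUserExample() throws Exception {
    // illustrative form parameters for a user-creation request
    MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
    paramsMap.add("userName", "user_test");
    paramsMap.add("userPassword", "123456qwe?");
    paramsMap.add("tenantId", "1");
    paramsMap.add("email", "user_test@example.com");
    paramsMap.add("queue", "default");

    MvcResult mvcResult = mockMvc.perform(post("/users/create")
            .header("sessionId", sessionId)
            .params(paramsMap))
        .andExpect(status().isCreated()) // assumed: the create endpoint responds 201
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
        .andReturn();

    Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
    Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}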

14      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageControllerTest.java

@@ -14,11 +14,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -28,11 +34,11 @@ import org.springframework.test.web.servlet.MvcResult;
 import org.springframework.util.LinkedMultiValueMap;
 import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+/**
+ * work flow lineage controller test
+ */
 public class WorkFlowLineageControllerTest extends AbstractControllerTest {
     private static Logger logger = LoggerFactory.getLogger(WorkFlowLineageControllerTest.class);
     @Test

21      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java

@@ -1,4 +1,3 @@
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -15,11 +14,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.dolphinscheduler.api.controller;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -29,12 +35,11 @@ import org.springframework.test.web.servlet.MvcResult;
 import org.springframework.util.LinkedMultiValueMap;
 import org.springframework.util.MultiValueMap;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+/**
+ * worker group controller test
+ */
 public class WorkerGroupControllerTest extends AbstractControllerTest{
     private static Logger logger = LoggerFactory.getLogger(WorkerGroupControllerTest.class);
     @Test

5       dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java

@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.dolphinscheduler.api.service;
 import static org.mockito.ArgumentMatchers.any;
@@ -48,6 +49,9 @@ import org.slf4j.LoggerFactory;
 import com.baomidou.mybatisplus.core.metadata.IPage;
 import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
+/**
+ * access token service test
+ */
 @RunWith(MockitoJUnitRunner.class)
 public class AccessTokenServiceTest {
@@ -59,7 +63,6 @@ public class AccessTokenServiceTest {
     @Mock
     private AccessTokenMapper accessTokenMapper;
     @Test
     @SuppressWarnings("unchecked")
     public void testQueryAccessTokenList() {

11      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java

@@ -21,9 +21,9 @@ import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.eq;
 import org.apache.dolphinscheduler.api.enums.Status;
+import org.apache.dolphinscheduler.api.service.impl.AlertGroupServiceImpl;
 import org.apache.dolphinscheduler.api.utils.PageInfo;
 import org.apache.dolphinscheduler.common.Constants;
-import org.apache.dolphinscheduler.common.enums.AlertType;
 import org.apache.dolphinscheduler.common.enums.UserType;
 import org.apache.dolphinscheduler.common.utils.CollectionUtils;
 import org.apache.dolphinscheduler.dao.entity.AlertGroup;
@@ -31,7 +31,6 @@ import org.apache.dolphinscheduler.dao.entity.User;
 import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -48,13 +47,17 @@ import org.slf4j.LoggerFactory;
 import com.baomidou.mybatisplus.core.metadata.IPage;
 import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
+/**
+ * alert group service test
+ */
 @RunWith(MockitoJUnitRunner.class)
 public class AlertGroupServiceTest {
     private static final Logger logger = LoggerFactory.getLogger(AlertGroupServiceTest.class);
     @InjectMocks
-    private AlertGroupService alertGroupService;
+    private AlertGroupServiceImpl alertGroupService;
     @Mock
     private AlertGroupMapper alertGroupMapper;
@@ -64,7 +67,7 @@ public class AlertGroupServiceTest {
     public void testQueryAlertGroup() {
         Mockito.when(alertGroupMapper.queryAllGroupList()).thenReturn(getList());
-        HashMap<String, Object> result = alertGroupService.queryAlertgroup();
+        Map<String, Object> result = alertGroupService.queryAlertgroup();
         logger.info(result.toString());
         List<AlertGroup> alertGroups = (List<AlertGroup>) result.get(Constants.DATA_LIST);
         Assert.assertTrue(CollectionUtils.isNotEmpty(alertGroups));
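The last two hunks show the test injecting the concrete AlertGroupServiceImpl and declaring the result as Map rather than HashMap, which is the usual MockitoJUnitRunner setup. Below is a minimal sketch of that pattern; the class name and fixture values are illustrative, and the AlertGroup setter used is assumed from the entity rather than shown in this diff.

package org.apache.dolphinscheduler.api.service;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.dolphinscheduler.api.service.impl.AlertGroupServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.dao.entity.AlertGroup;
import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;

@RunWith(MockitoJUnitRunner.class)
public class AlertGroupServiceExampleTest {

    // injecting the concrete Impl class lets Mockito wire the mocked mapper into its fields
    @InjectMocks
    private AlertGroupServiceImpl alertGroupService;

    @Mock
    private AlertGroupMapper alertGroupMapper;

    @Test
    @SuppressWarnings("unchecked")
    public void testQueryAlertGroup() {
        // stub the mapper so the service sees one pre-built group (illustrative fixture)
        AlertGroup alertGroup = new AlertGroup();
        alertGroup.setGroupName("test_group");
        List<AlertGroup> groups = new ArrayList<>();
        groups.add(alertGroup);
        Mockito.when(alertGroupMapper.queryAllGroupList()).thenReturn(groups);

        // declare the result as Map rather than HashMap so the test does not
        // depend on the concrete type returned by the service
        Map<String, Object> result = alertGroupService.queryAlertgroup();
        List<AlertGroup> alertGroups = (List<AlertGroup>) result.get(Constants.DATA_LIST);
        Assert.assertTrue(CollectionUtils.isNotEmpty(alertGroups));
    }
}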

3       dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceServiceTest.java

@@ -43,6 +43,9 @@ import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mockito.junit.MockitoJUnitRunner;
+/**
+ * alert plugin instance service test
+ */
 @RunWith(MockitoJUnitRunner.class)
 public class AlertPluginInstanceServiceTest {

Some files were not shown because too many files have changed in this diff.
