From fee49557242a13307961e4280b404fbc846241de Mon Sep 17 00:00:00 2001
From: xiangzihao <460888207@qq.com>
Date: Wed, 4 Sep 2024 21:22:34 +0800
Subject: [PATCH] [DSIP-68] Unify the table structure and table fields in
backend and frontend ui sections (#16544)
---
.github/workflows/api-test.yml | 27 +-
.github/workflows/backend.yml | 137 +--
.github/workflows/e2e.yml | 34 +-
.../mysql/docker-compose-base.yaml | 35 +
.../schema-check/mysql/running-test.sh | 54 ++
.../workflows/schema-check/mysql/start-job.sh | 57 ++
.../postgresql/docker-compose-base.yaml | 34 +
.../schema-check/postgresql/running-test.sh | 54 ++
.../schema-check/postgresql/start-job.sh | 45 +
.github/workflows/unit-test.yml | 4 +-
.gitignore | 1 +
docs/docs/en/about/glossary.md | 4 +-
docs/docs/en/architecture/task-structure.md | 2 +-
.../docs/en/contribute/architecture-design.md | 2 +-
docs/docs/en/guide/parameter/context.md | 4 +-
docs/docs/en/guide/project/project-list.md | 16 +-
docs/docs/en/guide/resource/task-group.md | 2 +-
docs/docs/en/guide/task/sub-process.md | 10 +-
docs/docs/en/guide/upgrade/incompatible.md | 2 +
docs/docs/en/guide/upgrade/upgrade.md | 2 +-
docs/docs/zh/about/glossary.md | 2 +-
docs/docs/zh/architecture/task-structure.md | 2 +-
.../docs/zh/contribute/architecture-design.md | 2 +-
docs/docs/zh/guide/parameter/context.md | 4 +-
docs/docs/zh/guide/resource/task-group.md | 2 +-
docs/docs/zh/guide/task/sub-process.md | 10 +-
docs/docs/zh/guide/upgrade/incompatible.md | 2 +
docs/docs/zh/guide/upgrade/upgrade.md | 2 +-
.../{sub_process.png => sub_workflow.png} | Bin
.../api/test/cases/ExecutorAPITest.java | 46 +-
.../api/test/cases/ProjectAPITest.java | 2 +
.../api/test/cases/SchedulerAPITest.java | 30 +-
.../api/test/cases/TenantAPITest.java | 2 +
.../api/test/cases/WorkerGroupAPITest.java | 2 +
.../test/cases/WorkflowDefinitionAPITest.java | 136 +--
.../test/cases/WorkflowInstanceAPITest.java | 116 +--
.../api/test/pages/workflow/ExecutorPage.java | 28 +-
.../test/pages/workflow/SchedulerPage.java | 4 +-
...nPage.java => WorkflowDefinitionPage.java} | 36 +-
...ncePage.java => WorkflowInstancePage.java} | 22 +-
.../test/resources/workflow-json/test.json | 10 +-
dolphinscheduler-api-test/pom.xml | 13 +-
.../audit/constants/AuditLogConstants.java | 7 +-
.../api/audit/enums/AuditType.java | 33 +-
.../impl/ScheduleAuditOperatorImpl.java | 2 +-
.../impl/WorkflowAuditOperatorImpl.java | 4 +-
...=> WorkflowInstanceAuditOperatorImpl.java} | 4 +-
.../controller/DataAnalysisController.java | 4 +-
.../api/controller/ExecutorController.java | 100 +-
.../api/controller/SchedulerController.java | 72 +-
.../api/controller/TaskGroupController.java | 8 +-
.../controller/TaskInstanceController.java | 18 +-
.../WorkflowDefinitionController.java | 16 +-
.../WorkflowInstanceController.java | 39 +-
.../controller/WorkflowLineageController.java | 16 +-
.../WorkflowTaskRelationController.java | 46 +-
.../controller/v2/StatisticsV2Controller.java | 6 +-
.../v2/TaskInstanceV2Controller.java | 8 +-
... => WorkflowTaskRelationV2Controller.java} | 13 +-
.../controller/v2/WorkflowV2Controller.java | 6 +-
.../dolphinscheduler/api/dto/ClusterDto.java | 9 +-
.../api/dto/DagDataSchedule.java | 22 +-
.../api/dto/DefineUserDto.java | 21 +-
.../api/dto/DynamicSubWorkflowDto.java | 2 +-
.../api/dto/WorkflowDefinitionDto.java | 26 -
.../api/dto/WorkflowInstanceDto.java | 26 -
.../dto/schedule/ScheduleCreateRequest.java | 14 +-
.../dto/schedule/ScheduleFilterRequest.java | 8 +-
.../dto/schedule/ScheduleUpdateRequest.java | 6 +-
.../TaskInstanceQueryRequest.java | 14 +-
.../TaskRelationCreateRequest.java | 7 +-
.../TaskRelationFilterRequest.java | 2 +-
.../dto/workflow/WorkflowCreateRequest.java | 2 +-
.../dto/workflow/WorkflowUpdateRequest.java | 8 +-
.../WorkflowInstanceQueryRequest.java | 2 +-
.../api/python/PythonGateway.java | 32 +-
.../api/service/impl/ExecutorServiceImpl.java | 59 +-
.../api/service/impl/ProjectServiceImpl.java | 2 +-
.../service/impl/SchedulerServiceImpl.java | 47 +-
.../impl/TaskDefinitionServiceImpl.java | 16 +-
.../service/impl/TaskInstanceServiceImpl.java | 6 +-
.../api/service/impl/TenantServiceImpl.java | 2 +-
.../service/impl/WorkerGroupServiceImpl.java | 6 +-
.../impl/WorkflowDefinitionServiceImpl.java | 64 +-
.../impl/WorkflowInstanceServiceImpl.java | 76 +-
.../impl/WorkflowLineageServiceImpl.java | 50 +-
.../impl/WorkflowTaskRelationServiceImpl.java | 57 +-
.../BackfillWorkflowRequestTransformer.java | 2 +-
.../dolphinscheduler/api/vo/ScheduleVO.java | 72 +-
.../api/vo/TaskDefinitionVO.java | 8 +-
.../resources/dynamic-task-type-config.yaml | 2 +-
.../resources/i18n/messages_en_US.properties | 6 +-
.../resources/i18n/messages_zh_CN.properties | 8 +-
.../src/main/resources/task-type-config.yaml | 2 +-
.../DataAnalysisControllerTest.java | 5 +-
.../controller/SchedulerControllerTest.java | 8 +-
.../controller/WorkerGroupControllerTest.java | 2 +-
.../WorkflowInstanceControllerTest.java | 32 +-
...InstanceExecuteFunctionControllerTest.java | 43 +-
.../WorkflowTaskRelationControllerTest.java | 7 +-
.../v2/TaskInstanceV2ControllerTest.java | 10 +-
.../api/service/SchedulerServiceTest.java | 12 +-
.../TaskDefinitionServiceImplTest.java | 23 +-
.../api/service/TaskInstanceServiceTest.java | 8 +-
.../WorkflowDefinitionServiceTest.java | 30 +-
...lowInstanceExecuteFunctionServiceTest.java | 24 +-
.../service/WorkflowInstanceServiceTest.java | 162 ++--
.../WorkflowTaskLineageServiceTest.java | 9 +-
.../WorkflowTaskRelationServiceTest.java | 179 ++--
.../common/constants/CommandKeyConstants.java | 2 +-
.../common/constants/Constants.java | 8 +-
.../sql/ClasspathSqlScriptParserTest.java | 2 +-
.../common/utils/CodeGenerateUtilsTest.java | 4 +-
.../src/test/resources/sql/mysql_ddl.sql | 8 +-
.../src/test/resources/sql/mysql_dml.sql | 4 +-
.../apache/dolphinscheduler/dao/AlertDao.java | 50 +-
.../dolphinscheduler/dao/entity/Alert.java | 57 +-
.../dolphinscheduler/dao/entity/Command.java | 30 +-
.../dolphinscheduler/dao/entity/DagData.java | 39 +-
.../dao/entity/DependentLineageTask.java | 4 +-
.../entity/DependentWorkflowDefinition.java | 27 +-
.../dao/entity/DqExecuteResult.java | 105 +--
.../dao/entity/DqTaskStatisticsValue.java | 57 +-
.../dao/entity/ErrorCommand.java | 73 +-
.../dolphinscheduler/dao/entity/Schedule.java | 66 +-
.../dao/entity/TaskGroupQueue.java | 53 +-
.../dao/entity/TaskInstance.java | 134 +--
.../UserWithWorkflowDefinitionCode.java | 4 +-
.../dao/entity/WorkflowAlertContent.java | 58 +-
.../dao/entity/WorkflowDefinition.java | 68 +-
.../dao/entity/WorkflowDefinitionLog.java | 8 +-
.../dao/entity/WorkflowInstance.java | 69 +-
.../dao/entity/WorkflowInstanceRelation.java | 52 +-
.../dao/entity/WorkflowTaskLineage.java | 2 +-
.../dao/entity/WorkflowTaskRelation.java | 57 +-
.../dao/entity/WorkflowTaskRelationLog.java | 34 +-
.../dao/mapper/ProjectMapper.java | 6 +-
.../dao/mapper/RelationSubWorkflowMapper.java | 7 +-
.../dao/mapper/ScheduleMapper.java | 34 +-
.../dao/mapper/TaskDefinitionMapper.java | 6 +-
.../dao/mapper/TaskGroupQueueMapper.java | 2 +-
.../dao/mapper/TaskInstanceMapper.java | 22 +-
.../dao/mapper/UserMapper.java | 4 +-
.../mapper/WorkflowDefinitionLogMapper.java | 10 +-
.../dao/mapper/WorkflowDefinitionMapper.java | 26 +-
.../dao/mapper/WorkflowInstanceMapper.java | 124 ++-
.../WorkflowInstanceRelationMapper.java | 12 +-
.../dao/mapper/WorkflowTaskLineageMapper.java | 2 +-
.../mapper/WorkflowTaskRelationLogMapper.java | 31 +-
.../mapper/WorkflowTaskRelationMapper.java | 86 +-
.../dao/repository/TaskDefinitionDao.java | 4 +-
.../impl/TaskDefinitionDaoImpl.java | 11 +-
.../impl/TaskDefinitionLogDaoImpl.java | 2 +-
.../repository/impl/TaskInstanceDaoImpl.java | 6 +-
.../impl/WorkflowDefinitionLogDaoImpl.java | 2 +-
.../impl/WorkflowInstanceDaoImpl.java | 14 +-
.../impl/WorkflowTaskRelationLogDaoImpl.java | 2 +-
.../dao/utils/TaskInstanceUtils.java | 8 +-
.../dao/mapper/AlertMapper.xml | 6 +-
.../dao/mapper/CommandMapper.xml | 22 +-
.../dao/mapper/DqExecuteResultMapper.xml | 28 +-
.../dao/mapper/ErrorCommandMapper.xml | 19 +-
.../dao/mapper/ProjectMapper.xml | 14 +-
.../dao/mapper/RelationSubWorkflowMapper.xml | 2 +-
.../dao/mapper/ScheduleMapper.xml | 68 +-
.../dao/mapper/TaskDefinitionMapper.xml | 10 +-
.../dao/mapper/TaskGroupQueueMapper.xml | 22 +-
.../dao/mapper/TaskInstanceMapper.xml | 44 +-
.../dao/mapper/UserMapper.xml | 14 +-
.../mapper/WorkflowDefinitionLogMapper.xml | 22 +-
.../dao/mapper/WorkflowDefinitionMapper.xml | 42 +-
.../dao/mapper/WorkflowInstanceMapper.xml | 155 ++--
.../mapper/WorkflowInstanceRelationMapper.xml | 28 +-
.../dao/mapper/WorkflowTaskLineageMapper.xml | 20 +-
.../mapper/WorkflowTaskRelationLogMapper.xml | 54 +-
.../dao/mapper/WorkflowTaskRelationMapper.xml | 147 ++-
.../resources/sql/dolphinscheduler_h2.sql | 173 ++--
.../resources/sql/dolphinscheduler_mysql.sql | 200 ++--
.../sql/dolphinscheduler_postgresql.sql | 168 ++--
.../mysql/dolphinscheduler_ddl.sql | 82 +-
.../mysql/dolphinscheduler_dml.sql | 7 +
.../postgresql/dolphinscheduler_ddl.sql | 89 +-
.../postgresql/dolphinscheduler_dml.sql | 7 +
.../dao/entity/ErrorCommandTest.java | 16 +-
.../dao/mapper/CommandMapperTest.java | 14 +-
.../dao/mapper/ErrorCommandMapperTest.java | 4 +-
.../dao/mapper/ScheduleMapperTest.java | 25 +-
.../dao/mapper/TaskDefinitionMapperTest.java | 4 +-
.../dao/mapper/TaskGroupQueueMapperTest.java | 2 +-
.../dao/mapper/TaskInstanceMapperTest.java | 31 +-
.../dao/mapper/UserMapperTest.java | 4 +-
.../WorkflowDefinitionLogMapperTest.java | 8 +-
.../mapper/WorkflowInstanceMapMapperTest.java | 20 +-
.../mapper/WorkflowInstanceMapperTest.java | 35 +-
.../mapper/WorkflowTaskLineageMapperTest.java | 4 +-
.../WorkflowTaskRelationLogMapperTest.java | 8 +-
.../WorkflowTaskRelationMapperTest.java | 10 +-
.../repository/impl/CommandDaoImplTest.java | 8 +-
.../impl/TaskGroupQueueDaoImplTest.java | 2 +-
.../impl/WorkflowInstanceDaoImplTest.java | 4 +-
.../cases/ClickhouseDataSourceE2ETest.java | 2 +
.../e2e/cases/ClusterE2ETest.java | 2 +
.../e2e/cases/EnvironmentE2ETest.java | 2 +
.../e2e/cases/FileManageE2ETest.java | 2 +
.../e2e/cases/HiveDataSourceE2ETest.java | 2 +
.../e2e/cases/MysqlDataSourceE2ETest.java | 2 +
.../e2e/cases/PostgresDataSourceE2ETest.java | 2 +
.../e2e/cases/ProjectE2ETest.java | 2 +
.../e2e/cases/QueueE2ETest.java | 2 +
.../e2e/cases/SqlServerDataSourceE2ETest.java | 2 +
.../e2e/cases/TenantE2ETest.java | 2 +
.../e2e/cases/TokenE2ETest.java | 2 +
.../e2e/cases/UserE2ETest.java | 2 +
.../e2e/cases/WorkerGroupE2ETest.java | 2 +
.../e2e/cases/WorkflowE2ETest.java | 6 +-
.../e2e/cases/WorkflowHttpTaskE2ETest.java | 3 +
.../e2e/cases/WorkflowJavaTaskE2ETest.java | 2 +
.../e2e/cases/WorkflowSwitchE2ETest.java | 2 +
.../e2e/cases/tasks/PythonTaskE2ETest.java | 2 +
.../e2e/cases/tasks/ShellTaskE2ETest.java | 2 +
.../e2e/pages/project/ProjectDetailPage.java | 8 +-
.../project/workflow/TaskInstanceTab.java | 2 +-
.../workflow/WorkflowDefinitionTab.java | 10 +-
.../pages/project/workflow/WorkflowForm.java | 4 +-
dolphinscheduler-e2e/pom.xml | 7 +
.../master/engine/TaskGroupCoordinator.java | 6 +-
.../handler/AbstractCommandHandler.java | 4 +-
.../handler/ReRunWorkflowCommandHandler.java | 2 +-
.../RecoverFailureTaskCommandHandler.java | 2 +-
.../handler/RunWorkflowCommandHandler.java | 2 +-
.../WorkflowFailoverCommandHandler.java | 2 +-
.../runnable/AbstractTaskInstanceFactory.java | 8 +-
.../runnable/TaskExecutionContextBuilder.java | 10 +-
.../task/runnable/TaskExecutionRunnable.java | 4 +-
.../WorkflowSuccessLifecycleListener.java | 8 +-
.../trigger/WorkflowBackfillTrigger.java | 16 +-
...flowInstanceRecoverFailureTaskTrigger.java | 6 +-
...flowInstanceRecoverSuspendTaskTrigger.java | 6 +-
.../WorkflowInstanceRepeatTrigger.java | 6 +-
.../trigger/WorkflowManualTrigger.java | 16 +-
.../trigger/WorkflowScheduleTrigger.java | 16 +-
.../master/failover/WorkflowFailover.java | 6 +-
.../master/metrics/MasterServerMetrics.java | 2 +-
.../server/master/metrics/TaskMetrics.java | 2 +-
.../registry/MasterWaitingStrategy.java | 2 +-
...TaskInstanceDispatchOperationFunction.java | 2 +-
...icITaskInstancePauseOperationFunction.java | 2 +-
.../AsyncMasterTaskDelayQueueLooper.java | 2 +-
.../runner/execute/MasterTaskExecutor.java | 4 +-
...askInstanceExecuteDispatchEventSender.java | 2 +-
...TaskInstanceExecuteRunningEventSender.java | 2 +-
...askInstanceExecutionFailedEventSender.java | 2 +-
...askInstanceExecutionKilledEventSender.java | 2 +-
...askInstanceExecutionPausedEventSender.java | 2 +-
...skInstanceExecutionSuccessEventSender.java | 2 +-
.../task/condition/ConditionLogicTask.java | 2 +-
.../ConditionLogicTaskPluginFactory.java | 2 +-
.../DependentAsyncTaskExecuteFunction.java | 2 +-
.../DependentLogicTaskPluginFactory.java | 2 +-
.../DynamicAsyncTaskExecuteFunction.java | 4 +-
.../task/dynamic/DynamicCommandUtils.java | 6 +-
.../runner/task/dynamic/DynamicLogicTask.java | 16 +-
.../task/fake/LogicFakeTaskPluginFactory.java | 4 +-
.../subworkflow/SubWorkflowLogicTask.java | 20 +-
.../SubWorkflowLogicTaskPluginFactory.java | 2 +-
.../trigger/SubWorkflowManualTrigger.java | 2 +-
.../SwitchLogicTaskPluginFactory.java | 2 +-
.../server/master/utils/DependentExecute.java | 16 +-
.../master/utils/WorkflowInstanceUtils.java | 2 +-
.../server/master/ParamsTest.java | 2 +-
.../server/master/it/Repository.java | 4 +-
.../it/cases/WorkflowInstancePauseIT.java | 4 +-
.../cases/WorkflowInstanceRecoverPauseIT.java | 2 +-
.../cases/WorkflowInstanceRecoverStopIT.java | 2 +-
.../it/cases/WorkflowInstanceStopIT.java | 4 +-
.../master/it/cases/WorkflowSchedulingIT.java | 4 +-
.../master/it/cases/WorkflowStartIT.java | 8 +-
.../GlobalTaskDispatchWaitingQueueTest.java | 2 +-
.../task/dynamic/DynamicCommandUtilsTest.java | 8 +-
.../utils/WorkflowInstanceUtilsTest.java | 2 +-
...rkflow_with_sub_workflow_task_success.yaml | 16 +-
...rkflow_with_sub_workflow_task_success.yaml | 16 +-
...rkflow_with_sub_workflow_task_success.yaml | 16 +-
...orkflow_with_sub_workflow_task_failed.yaml | 12 +-
...rkflow_with_sub_workflow_task_success.yaml | 12 +-
...rkflow_with_sub_workflow_task_success.yaml | 16 +-
.../scheduler/quartz/ProcessScheduleTask.java | 4 +-
.../service/alert/WorkflowAlertManager.java | 86 +-
.../service/command/CommandServiceImpl.java | 4 +-
.../service/cron/CronUtils.java | 2 +-
.../service/expand/CuringParamsService.java | 8 +-
.../expand/CuringParamsServiceImpl.java | 17 +-
.../TimePlaceholderResolverExpandService.java | 2 +-
...ePlaceholderResolverExpandServiceImpl.java | 2 +-
.../service/process/ProcessService.java | 83 +-
.../service/process/ProcessServiceImpl.java | 878 ++++--------------
.../{ProcessDag.java => WorkflowDag.java} | 47 +-
.../StandByTaskInstancePriorityQueue.java | 2 +-
.../service/queue/TaskPriority.java | 139 +--
.../subworkflow/SubWorkflowServiceImpl.java | 6 +-
.../service/utils/DagHelper.java | 68 +-
.../service/utils/ParamUtils.java | 16 +-
.../service/utils/ProcessUtils.java | 6 +-
.../alert/WorkflowAlertManagerTest.java | 8 +-
.../command/MessageServiceImplTest.java | 4 +-
.../expand/CuringParamsServiceTest.java | 8 +-
.../service/process/ProcessServiceTest.java | 76 +-
.../StandByTaskInstancePriorityQueueTest.java | 2 +-
.../service/utils/DagHelperTest.java | 46 +-
.../src/main/resources/application.yaml | 8 +-
.../plugin/task/api/TaskExecutionContext.java | 126 +--
.../task/api/k8s/impl/K8sTaskExecutor.java | 6 +-
.../api/parameters/DynamicParameters.java | 9 +-
...meters.java => SubWorkflowParameters.java} | 17 +-
.../api/task/SubWorkflowLogicTaskChannel.java | 4 +-
.../SubWorkflowLogicTaskChannelFactory.java | 2 +-
.../plugin/task/api/utils/LogUtils.java | 6 +-
.../plugin/task/dq/DataQualityTask.java | 8 +-
.../tools/demo/ProcessDefinitionDemo.java | 4 +-
.../tools/lineage/MigrateLineageService.java | 4 +-
.../{sub_process.png => sub_workflow.png} | Bin
...ocess_hover.png => sub_workflow_hover.png} | Bin
dolphinscheduler-ui/src/locales/en_US/home.ts | 4 +-
.../src/locales/en_US/project.ts | 12 +-
dolphinscheduler-ui/src/locales/zh_CN/home.ts | 4 +-
.../src/locales/zh_CN/project.ts | 10 +-
.../src/service/modules/audit/types.ts | 2 +-
.../src/service/modules/data-quality/types.ts | 10 +-
.../src/service/modules/executors/index.ts | 8 +-
.../src/service/modules/executors/types.ts | 16 +-
.../modules/projects-analysis/index.ts | 4 +-
.../modules/projects-analysis/types.ts | 4 +-
.../src/service/modules/schedules/index.ts | 6 +-
.../src/service/modules/schedules/types.ts | 14 +-
.../src/service/modules/task-group/types.ts | 4 +-
.../service/modules/task-instances/types.ts | 18 +-
.../index.ts | 87 +-
.../types.ts | 4 +-
.../index.ts | 46 +-
.../types.ts | 16 +-
.../index.ts | 18 +-
.../types.ts | 8 +-
.../src/store/project/task-node.ts | 6 +-
.../src/store/project/task-type.ts | 4 +-
.../src/store/project/types.ts | 2 +-
.../data-quality/task-result/use-table.ts | 8 +-
.../views/home/components/definition-card.tsx | 18 +-
dolphinscheduler-ui/src/views/home/index.tsx | 37 +-
...finition.ts => use-workflow-definition.ts} | 10 +-
...process-state.ts => use-workflow-state.ts} | 20 +-
.../statistics/list-command-table.tsx | 8 +-
.../statistics/list-error-command-table.tsx | 8 +-
.../dependencies/use-dependencies.ts | 22 +-
.../overview/components/definition-card.tsx | 18 +-
.../src/views/projects/overview/index.tsx | 36 +-
...finition.ts => use-workflow-definition.ts} | 10 +-
...process-state.ts => use-workflow-state.ts} | 20 +-
.../task/components/node/detail-modal.tsx | 20 +-
.../task/components/node/fields/index.ts | 2 +-
.../components/node/fields/use-child-node.ts | 24 +-
.../components/node/fields/use-dependent.ts | 42 +-
.../task/components/node/fields/use-switch.ts | 10 +-
.../node/fields/use-task-definition.ts | 6 +-
...e-process-name.ts => use-workflow-name.ts} | 28 +-
.../task/components/node/format-data.ts | 14 +-
.../task/components/node/tasks/index.ts | 4 +-
.../components/node/tasks/use-datasync.ts | 4 +-
.../task/components/node/tasks/use-dms.ts | 4 +-
.../task/components/node/tasks/use-dynamic.ts | 2 +-
.../components/node/tasks/use-hive-cli.ts | 4 +-
.../task/components/node/tasks/use-java.ts | 4 +-
.../task/components/node/tasks/use-pytorch.ts | 4 +-
...use-sub-process.ts => use-sub-workflow.ts} | 6 +-
.../projects/task/components/node/types.ts | 4 +-
.../projects/task/constants/task-type.ts | 6 +-
.../projects/task/instance/batch-task.tsx | 14 +-
.../projects/task/instance/stream-task.tsx | 4 +-
.../src/views/projects/task/instance/types.ts | 2 +-
.../task/instance/use-stream-table.ts | 12 +-
.../views/projects/task/instance/use-table.ts | 24 +-
.../components/dag/dag-save-modal.tsx | 40 +-
.../components/dag/dag-startup-param.tsx | 2 +-
.../workflow/components/dag/dag-toolbar.tsx | 14 +-
.../workflow/components/dag/dag.module.scss | 8 +-
.../workflow/components/dag/index.tsx | 20 +-
.../projects/workflow/components/dag/types.ts | 18 +-
.../components/dag/use-business-mapper.ts | 2 +-
.../components/dag/use-custom-cell-builder.ts | 4 +-
.../components/dag/use-node-status.ts | 4 +-
.../workflow/components/dag/use-task-edit.ts | 54 +-
.../definition/components/start-modal.tsx | 6 +-
.../definition/components/timing-modal.tsx | 10 +-
.../workflow/definition/components/types.ts | 2 +-
.../definition/components/use-form.ts | 6 +-
.../definition/components/use-modal.ts | 26 +-
.../definition/components/use-table.ts | 2 +-
.../workflow/definition/create/index.tsx | 4 +-
.../workflow/definition/detail/index.tsx | 14 +-
.../projects/workflow/definition/index.tsx | 2 +-
.../workflow/definition/timing/index.tsx | 2 +-
.../workflow/definition/timing/types.ts | 2 +-
.../workflow/definition/timing/use-table.ts | 18 +-
.../workflow/definition/tree/index.tsx | 6 +-
.../projects/workflow/definition/types.ts | 6 +-
.../projects/workflow/definition/use-table.ts | 4 +-
.../instance/components/table-action.tsx | 6 +-
.../instance/components/variables-view.tsx | 8 +-
...on.tsx => workflow-instance-condition.tsx} | 36 +-
.../workflow/instance/detail/index.tsx | 12 +-
.../workflow/instance/gantt/use-gantt.ts | 2 +-
.../projects/workflow/instance/index.tsx | 6 +-
.../views/projects/workflow/instance/types.ts | 2 +-
.../projects/workflow/instance/use-table.ts | 39 +-
.../timing/components/timing-condition.tsx | 34 +-
.../views/projects/workflow/timing/index.tsx | 4 +-
.../views/projects/workflow/timing/types.ts | 2 +-
.../views/resource/task-group/queue/index.tsx | 6 +-
.../resource/task-group/queue/use-table.ts | 2 +-
.../TaskExecutionDispatchEventSender.java | 2 +-
.../TaskExecutionFailedEventSender.java | 2 +-
.../TaskExecutionKilledEventSender.java | 2 +-
.../TaskExecutionPausedEventSender.java | 2 +-
.../TaskExecutionSuccessEventSender.java | 2 +-
...skInstanceExecutionRunningEventSender.java | 2 +-
.../worker/runner/WorkerTaskExecutor.java | 4 +-
...TaskInstanceDispatchOperationFunction.java | 4 +-
.../utils/TaskExecutionContextUtils.java | 6 +-
.../worker/utils/TaskFilesTransferUtils.java | 4 +-
.../runner/DefaultWorkerTaskExecutorTest.java | 4 +-
.../utils/TaskExecutionContextUtilsTest.java | 12 +-
.../utils/TaskFilesTransferUtilsTest.java | 18 +-
431 files changed, 3939 insertions(+), 5249 deletions(-)
create mode 100644 .github/workflows/schema-check/mysql/docker-compose-base.yaml
create mode 100644 .github/workflows/schema-check/mysql/running-test.sh
create mode 100644 .github/workflows/schema-check/mysql/start-job.sh
create mode 100644 .github/workflows/schema-check/postgresql/docker-compose-base.yaml
create mode 100644 .github/workflows/schema-check/postgresql/running-test.sh
create mode 100644 .github/workflows/schema-check/postgresql/start-job.sh
rename docs/img/tasks/icons/{sub_process.png => sub_workflow.png} (100%)
rename dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/{ProcessDefinitionPage.java => WorkflowDefinitionPage.java} (71%)
rename dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/{ProcessInstancePage.java => WorkflowInstancePage.java} (73%)
rename dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/{ProcessInstanceAuditOperatorImpl.java => WorkflowInstanceAuditOperatorImpl.java} (94%)
rename dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/{ProcessTaskRelationV2Controller.java => WorkflowTaskRelationV2Controller.java} (94%)
delete mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/WorkflowDefinitionDto.java
delete mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/WorkflowInstanceDto.java
rename dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/{ProcessDag.java => WorkflowDag.java} (62%)
rename dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/{SubProcessParameters.java => SubWorkflowParameters.java} (70%)
rename dolphinscheduler-ui/public/images/task-icons/{sub_process.png => sub_workflow.png} (100%)
rename dolphinscheduler-ui/public/images/task-icons/{sub_process_hover.png => sub_workflow_hover.png} (100%)
rename dolphinscheduler-ui/src/service/modules/{process-definition => workflow-definition}/index.ts (58%)
rename dolphinscheduler-ui/src/service/modules/{process-definition => workflow-definition}/types.ts (96%)
rename dolphinscheduler-ui/src/service/modules/{process-instances => workflow-instances}/index.ts (61%)
rename dolphinscheduler-ui/src/service/modules/{process-instances => workflow-instances}/types.ts (89%)
rename dolphinscheduler-ui/src/service/modules/{process-task-relation => workflow-task-relation}/index.ts (75%)
rename dolphinscheduler-ui/src/service/modules/{process-task-relation => workflow-task-relation}/types.ts (87%)
rename dolphinscheduler-ui/src/views/home/{use-process-definition.ts => use-workflow-definition.ts} (83%)
rename dolphinscheduler-ui/src/views/home/{use-process-state.ts => use-workflow-state.ts} (79%)
rename dolphinscheduler-ui/src/views/projects/overview/{use-process-definition.ts => use-workflow-definition.ts} (83%)
rename dolphinscheduler-ui/src/views/projects/overview/{use-process-state.ts => use-workflow-state.ts} (80%)
rename dolphinscheduler-ui/src/views/projects/task/components/node/fields/{use-process-name.ts => use-workflow-name.ts} (80%)
rename dolphinscheduler-ui/src/views/projects/task/components/node/tasks/{use-sub-process.ts => use-sub-workflow.ts} (94%)
rename dolphinscheduler-ui/src/views/projects/workflow/instance/components/{process-instance-condition.tsx => workflow-instance-condition.tsx} (84%)
diff --git a/.github/workflows/api-test.yml b/.github/workflows/api-test.yml
index 900dab8d7e..90a2e9aa17 100644
--- a/.github/workflows/api-test.yml
+++ b/.github/workflows/api-test.yml
@@ -89,26 +89,35 @@ jobs:
strategy:
matrix:
case:
- - name: Tenant
+ - name: TenantAPITest
class: org.apache.dolphinscheduler.api.test.cases.TenantAPITest
- - name: WorkerGroup
+ - name: WorkerGroupAPITest
class: org.apache.dolphinscheduler.api.test.cases.WorkerGroupAPITest
- - name: Project
+ - name: ProjectAPITest
class: org.apache.dolphinscheduler.api.test.cases.ProjectAPITest
- - name: Workflow
- class: org.apache.dolphinscheduler.api.test.cases.ProcessDefinitionAPITest
- - name: Scheduler
+ - name: WorkflowDefinitionAPITest
+ class: org.apache.dolphinscheduler.api.test.cases.WorkflowDefinitionAPITest
+ - name: SchedulerAPITest
class: org.apache.dolphinscheduler.api.test.cases.SchedulerAPITest
- - name: Executor
+ - name: ExecutorAPITest
class: org.apache.dolphinscheduler.api.test.cases.ExecutorAPITest
- - name: ProcessInstance
- class: org.apache.dolphinscheduler.api.test.cases.ProcessInstanceAPITest
+ - name: WorkflowInstanceAPITest
+ class: org.apache.dolphinscheduler.api.test.cases.WorkflowInstanceAPITest
env:
RECORDING_PATH: /tmp/recording-${{ matrix.case.name }}
steps:
- uses: actions/checkout@v4
with:
submodules: true
+ - name: Set up JDK 11
+ uses: actions/setup-java@v4
+ with:
+ java-version: 11
+ distribution: 'adopt'
+ - name: Collect Workflow Telemetry
+ uses: ./.github/actions/workflow-telemetry-action
+ with:
+ comment_on_pr: false
- name: Cache local Maven repository
uses: actions/cache@v4
with:
diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
index 52ccc6c431..0e98226e2a 100644
--- a/.github/workflows/backend.yml
+++ b/.github/workflows/backend.yml
@@ -132,136 +132,35 @@ jobs:
run: |
/bin/bash ${{ matrix.case.script }}
schema-check:
+ name: ${{ matrix.case.name }}-${{ matrix.version }}
+ needs: build
runs-on: ubuntu-latest
- if: ${{ (needs.paths-filter.outputs.db-schema == 'true') || (github.event_name == 'push') }}
timeout-minutes: 20
- needs: build
- services:
- mysql:
- image: mysql:5.7
- env:
- MYSQL_ROOT_PASSWORD: mysql
- MYSQL_DATABASE: dolphinscheduler_dev
- ports:
- - 3306:3306
- options: --name=mysql --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3
- postgres:
- image: postgres:15
- env:
- POSTGRES_PASSWORD: postgres
- POSTGRES_DB: dolphinscheduler_dev
- ports:
- - 5432:5432
- options: --name=postgres --health-cmd=pg_isready --health-interval=10s --health-timeout=5s --health-retries=5
strategy:
fail-fast: false
matrix:
- db: ["mysql", "postgresql"]
version: ["3.1.9", "3.2.0"]
+ case:
+ - name: schema-check-with-mysql
+ script: .github/workflows/schema-check/mysql/start-job.sh
+ - name: schema-check-with-postgresql
+ script: .github/workflows/schema-check/postgresql/start-job.sh
steps:
- - name: Set up JDK 8
- uses: actions/setup-java@v4
+ - uses: actions/checkout@v4
with:
- java-version: 8
- distribution: 'adopt'
- - name: Install Atlas and Create Dir
- run: |
- mkdir -p dolphinscheduler/dev dolphinscheduler/${{ matrix.version }}
- curl -sSf https://atlasgo.sh | sh
- - name: Download Tarball
+ submodules: true
+ - name: Collect Workflow Telemetry
+ uses: ./.github/actions/workflow-telemetry-action
+ with:
+ comment_on_pr: false
+ - name: Download Binary Package
uses: actions/download-artifact@v4
with:
name: binary-package-8
- path: dolphinscheduler/dev
- - name: Set Env
- run: |
- VERSION=${{ matrix.version }}
- echo "DATABASE_VERSION=${VERSION//\./}" >> $GITHUB_ENV
- - name: Prepare
- run: |
- wget https://archive.apache.org/dist/dolphinscheduler/${{ matrix.version }}/apache-dolphinscheduler-${{ matrix.version }}-bin.tar.gz -P dolphinscheduler/${{ matrix.version }}
- tar -xzf dolphinscheduler/${{ matrix.version }}/apache-dolphinscheduler-${{ matrix.version }}-bin.tar.gz -C dolphinscheduler/${{ matrix.version }} --strip-components 1
- tar -xzf dolphinscheduler/dev/apache-dolphinscheduler-*-bin.tar.gz -C dolphinscheduler/dev --strip-components 1
-
- if [[ ${{ matrix.db }} == "mysql" ]]; then
- MYSQL_JDBC_URL="https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar"
- MYSQL_JDBC_JAR="mysql-connector-java-8.0.16.jar"
- wget ${MYSQL_JDBC_URL} -O /tmp/${MYSQL_JDBC_JAR}
- for base_dir in dolphinscheduler/dev dolphinscheduler/${{ matrix.version }}; do
- if [[ $base_dir == *"dolphinscheduler/2"* ]]; then
- cp /tmp/${MYSQL_JDBC_JAR} ${base_dir}/lib
- else
- for d in alert-server api-server master-server worker-server tools; do
- cp /tmp/${MYSQL_JDBC_JAR} ${base_dir}/${d}/libs
- done
- fi
- done
- docker exec -i mysql mysql -uroot -pmysql -e "create database dolphinscheduler_${{ env.DATABASE_VERSION }}";
- else
- docker exec -i postgres psql -U postgres -c "create database dolphinscheduler_${{ env.DATABASE_VERSION }};"
- fi
- - name: Check
+ path: ds_schema_check_test/dev
+ - name: Running Schema Check
run: |
- if [[ $DATABASE_VERSION -lt 300 ]]; then
- chmod +x dolphinscheduler/dev/tools/bin/upgrade-schema.sh dolphinscheduler/${{ matrix.version }}/script/create-dolphinscheduler.sh
- else
- chmod +x dolphinscheduler/dev/tools/bin/upgrade-schema.sh dolphinscheduler/${{ matrix.version }}/tools/bin/upgrade-schema.sh
- fi
- if [[ ${{ matrix.db }} == "mysql" ]]; then
- export DATABASE="mysql"
- export SPRING_DATASOURCE_DRIVER_CLASS_NAME="com.mysql.cj.jdbc.Driver"
- export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_dev?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false"
- export SPRING_DATASOURCE_USERNAME="root"
- export SPRING_DATASOURCE_PASSWORD="mysql"
- bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh
-
- export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_${{ env.DATABASE_VERSION }}?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false"
- if [[ $DATABASE_VERSION -lt 300 ]]; then
- bash dolphinscheduler/${{ matrix.version }}/script/create-dolphinscheduler.sh
- else
- bash dolphinscheduler/${{ matrix.version }}/tools/bin/upgrade-schema.sh
- fi
- bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh
-
- atlas_result=$(atlas schema diff \
- --from "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_${{ env.DATABASE_VERSION }}" \
- --to "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_dev")
- if [[ ${atlas_result} != *"Schemas are synced"* ]]; then
- echo "================================================================================================"
- echo " !!!!! For Contributors !!!!!"
- echo "================================================================================================"
- echo "Database schema not sync, please add below change in the latest version of dolphinscheduler-dao/src/main/resources/sql/upgrade directory"
- echo "${atlas_result}"
- exit 1
- fi
- else
- export DATABASE="postgresql"
- export SPRING_DATASOURCE_DRIVER_CLASS_NAME="org.postgresql.Driver"
- export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_dev"
- export SPRING_DATASOURCE_USERNAME="postgres"
- export SPRING_DATASOURCE_PASSWORD="postgres"
- bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh
-
- export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_${{ env.DATABASE_VERSION }}"
- if [[ $DATABASE_VERSION -lt 300 ]]; then
- bash dolphinscheduler/${{ matrix.version }}/script/create-dolphinscheduler.sh
- else
- bash dolphinscheduler/${{ matrix.version }}/tools/bin/upgrade-schema.sh
- fi
- bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh
-
- atlas_result=$(atlas schema diff \
- --from "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_${{ env.DATABASE_VERSION }}?search_path=public&sslmode=disable" \
- --to "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_dev?search_path=public&sslmode=disable")
- if [[ ${atlas_result} != *"Schemas are synced"* ]]; then
- echo "================================================================================================"
- echo " !!!!! For Contributors !!!!!"
- echo "================================================================================================"
- echo "Database schema not sync, please add below change in the latest version in dolphinscheduler-dao/src/main/resources/sql/upgrade directory"
- echo "${atlas_result}"
- exit 1
- fi
- fi
+ /bin/bash ${{ matrix.case.script }} ${{ matrix.version }}
result:
name: Build
runs-on: ubuntu-latest
@@ -275,7 +174,7 @@ jobs:
echo "Skip Build!"
exit 0
fi
- if [[ ${{ needs.build.result }} != 'success' || ${{ needs.cluster-test.result }} != 'success' ]]; then
+ if [[ ${{ needs.build.result }} != 'success' || ${{ needs.cluster-test.result }} != 'success' || ${{ needs.schema-check.result }} != 'success' ]]; then
echo "Build Failed!"
exit -1
fi
diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
index 565187572a..a24cf820f8 100644
--- a/.github/workflows/e2e.yml
+++ b/.github/workflows/e2e.yml
@@ -92,45 +92,45 @@ jobs:
strategy:
matrix:
case:
- - name: Tenant
+ - name: TenantE2ETest
class: org.apache.dolphinscheduler.e2e.cases.TenantE2ETest
- - name: User
+ - name: UserE2ETest
class: org.apache.dolphinscheduler.e2e.cases.UserE2ETest
- - name: WorkerGroup
+ - name: WorkerGroupE2ETest
class: org.apache.dolphinscheduler.e2e.cases.WorkerGroupE2ETest
- - name: Project
+ - name: ProjectE2ETest
class: org.apache.dolphinscheduler.e2e.cases.ProjectE2ETest
- - name: Queue
+ - name: QueueE2ETest
class: org.apache.dolphinscheduler.e2e.cases.QueueE2ETest
- - name: Environment
+ - name: EnvironmentE2ETest
class: org.apache.dolphinscheduler.e2e.cases.EnvironmentE2ETest
- - name: Cluster
+ - name: ClusterE2ETest
class: org.apache.dolphinscheduler.e2e.cases.ClusterE2ETest
- - name: Token
+ - name: TokenE2ETest
class: org.apache.dolphinscheduler.e2e.cases.TokenE2ETest
- - name: Workflow
+ - name: WorkflowE2ETest
class: org.apache.dolphinscheduler.e2e.cases.WorkflowE2ETest
- - name: WorkflowHttp
+ - name: WorkflowHttpTaskE2ETest
class: org.apache.dolphinscheduler.e2e.cases.WorkflowHttpTaskE2ETest
- - name: WorkflowJava
+ - name: WorkflowJavaTaskE2ETest
class: org.apache.dolphinscheduler.e2e.cases.WorkflowJavaTaskE2ETest
# - name: WorkflowForSwitch
# class: org.apache.dolphinscheduler.e2e.cases.WorkflowSwitchE2ETest
- - name: FileManage
+ - name: FileManageE2ETest
class: org.apache.dolphinscheduler.e2e.cases.FileManageE2ETest
- - name: MysqlDataSource
+ - name: MysqlDataSourceE2ETest
class: org.apache.dolphinscheduler.e2e.cases.MysqlDataSourceE2ETest
- - name: ClickhouseDataSource
+ - name: ClickhouseDataSourceE2ETest
class: org.apache.dolphinscheduler.e2e.cases.ClickhouseDataSourceE2ETest
- - name: PostgresDataSource
+ - name: PostgresDataSourceE2ETest
class: org.apache.dolphinscheduler.e2e.cases.PostgresDataSourceE2ETest
- name: ShellTaskE2ETest
class: org.apache.dolphinscheduler.e2e.cases.tasks.ShellTaskE2ETest
- name: PythonTaskE2ETest
class: org.apache.dolphinscheduler.e2e.cases.tasks.PythonTaskE2ETest
- - name: SqlServerDataSource
+ - name: SqlServerDataSourceE2ETest
class: org.apache.dolphinscheduler.e2e.cases.SqlServerDataSourceE2ETest
- - name: HiveDataSource
+ - name: HiveDataSourceE2ETest
class: org.apache.dolphinscheduler.e2e.cases.HiveDataSourceE2ETest
env:
RECORDING_PATH: /tmp/recording-${{ matrix.case.name }}
diff --git a/.github/workflows/schema-check/mysql/docker-compose-base.yaml b/.github/workflows/schema-check/mysql/docker-compose-base.yaml
new file mode 100644
index 0000000000..d515a91762
--- /dev/null
+++ b/.github/workflows/schema-check/mysql/docker-compose-base.yaml
@@ -0,0 +1,35 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+version: "3"
+
+services:
+ mysql:
+ container_name: mysql
+ image: mysql:8.0.33
+ command: --default-authentication-plugin=mysql_native_password
+ restart: always
+ environment:
+ MYSQL_ROOT_PASSWORD: mysql
+ MYSQL_DATABASE: dolphinscheduler_dev
+ ports:
+ - "3306:3306"
+ healthcheck:
+ test: mysqladmin ping -h 127.0.0.1 -u root --password=$$MYSQL_ROOT_PASSWORD
+ interval: 5s
+ timeout: 60s
+ retries: 120
diff --git a/.github/workflows/schema-check/mysql/running-test.sh b/.github/workflows/schema-check/mysql/running-test.sh
new file mode 100644
index 0000000000..72e2fb3b6a
--- /dev/null
+++ b/.github/workflows/schema-check/mysql/running-test.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+set -euox pipefail
+
+DS_VERSION=$1
+DATABASE_VERSION=$2
+
+# Install dev schema
+export DATABASE="mysql"
+export SPRING_DATASOURCE_DRIVER_CLASS_NAME="com.mysql.cj.jdbc.Driver"
+export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_dev?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false"
+export SPRING_DATASOURCE_USERNAME="root"
+export SPRING_DATASOURCE_PASSWORD="mysql"
+bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh
+
+# Install the target version schema and upgrade it
+export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_${DATABASE_VERSION}?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false"
+bash ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh
+bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh
+
+# Compare the schema
+set +x
+atlas_result=$(atlas schema diff \
+ --from "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_${DATABASE_VERSION}" \
+ --to "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_dev")
+if [[ ${atlas_result} != *"Schemas are synced"* ]]; then
+ echo "================================================================================================"
+ echo " !!!!! For Contributors !!!!!"
+ echo "================================================================================================"
+ echo "Database schema not sync, please add below change in the latest version of dolphinscheduler-dao/src/main/resources/sql/upgrade directory"
+ echo "${atlas_result}"
+ exit 1
+else
+ echo "================================================================================================"
+ echo " !!!!! For Contributors !!!!!"
+ echo "================================================================================================"
+ echo "Database schema sync successfully"
+ exit 0
+fi
diff --git a/.github/workflows/schema-check/mysql/start-job.sh b/.github/workflows/schema-check/mysql/start-job.sh
new file mode 100644
index 0000000000..4ca8ee4810
--- /dev/null
+++ b/.github/workflows/schema-check/mysql/start-job.sh
@@ -0,0 +1,57 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+set -euox pipefail
+
+DS_VERSION=$1
+DATABASE_VERSION=${DS_VERSION//\./}
+
+# Install Atlas and Create Dir
+mkdir -p ds_schema_check_test/dev ds_schema_check_test/${DS_VERSION}
+curl -sSf https://atlasgo.sh | sh
+
+# Preparing the environment
+wget https://archive.apache.org/dist/dolphinscheduler/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz -P ds_schema_check_test/${DS_VERSION}
+tar -xzf ds_schema_check_test/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz -C ds_schema_check_test/${DS_VERSION} --strip-components 1
+tar -xzf ds_schema_check_test/dev/apache-dolphinscheduler-*-bin.tar.gz -C ds_schema_check_test/dev --strip-components 1
+
+if [[ $DATABASE_VERSION -lt 300 ]]; then
+ chmod +x ds_schema_check_test/dev/tools/bin/upgrade-schema.sh ds_schema_check_test/${DS_VERSION}/script/create-dolphinscheduler.sh
+else
+ chmod +x ds_schema_check_test/dev/tools/bin/upgrade-schema.sh ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh
+fi
+
+MYSQL_JDBC_URL="https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar"
+MYSQL_JDBC_JAR="mysql-connector-java-8.0.16.jar"
+wget ${MYSQL_JDBC_URL} -O ds_schema_check_test/${MYSQL_JDBC_JAR}
+for base_dir in ds_schema_check_test/dev ds_schema_check_test/${DS_VERSION}; do
+  if [[ $base_dir == *"ds_schema_check_test/2"* ]]; then
+ cp ds_schema_check_test/${MYSQL_JDBC_JAR} ${base_dir}/lib
+ else
+ for d in alert-server api-server master-server worker-server tools; do
+ cp ds_schema_check_test/${MYSQL_JDBC_JAR} ${base_dir}/${d}/libs
+ done
+ fi
+done
+docker compose -f .github/workflows/schema-check/mysql/docker-compose-base.yaml up -d --wait
+docker exec -i mysql mysql -uroot -pmysql -e "create database dolphinscheduler_${DATABASE_VERSION}";
+
+#Running schema check tests
+/bin/bash .github/workflows/schema-check/mysql/running-test.sh ${DS_VERSION} ${DATABASE_VERSION}
+
+#Cleanup
+docker compose -f .github/workflows/schema-check/mysql/docker-compose-base.yaml down -v --remove-orphans
diff --git a/.github/workflows/schema-check/postgresql/docker-compose-base.yaml b/.github/workflows/schema-check/postgresql/docker-compose-base.yaml
new file mode 100644
index 0000000000..9f09f0d326
--- /dev/null
+++ b/.github/workflows/schema-check/postgresql/docker-compose-base.yaml
@@ -0,0 +1,34 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+version: "3"
+
+services:
+ postgres:
+ container_name: postgres
+ image: postgres:14.1
+ restart: always
+ environment:
+ POSTGRES_PASSWORD: postgres
+ POSTGRES_DB: dolphinscheduler_dev
+ ports:
+ - "5432:5432"
+ healthcheck:
+ test: [ "CMD-SHELL", "pg_isready -U postgres" ]
+ interval: 5s
+ timeout: 60s
+ retries: 120
diff --git a/.github/workflows/schema-check/postgresql/running-test.sh b/.github/workflows/schema-check/postgresql/running-test.sh
new file mode 100644
index 0000000000..0118ca2477
--- /dev/null
+++ b/.github/workflows/schema-check/postgresql/running-test.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+set -euox pipefail
+
+DS_VERSION=$1
+DATABASE_VERSION=$2
+
+# Install dev schema
+export DATABASE="postgresql"
+export SPRING_DATASOURCE_DRIVER_CLASS_NAME="org.postgresql.Driver"
+export SPRING_DATASOURCE_USERNAME="postgres"
+export SPRING_DATASOURCE_PASSWORD="postgres"
+export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_dev"
+bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh
+
+# Install the target version schema and upgrade it
+export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_${DATABASE_VERSION}"
+bash ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh
+bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh
+
+# Compare the schema
+set +x
+atlas_result=$(atlas schema diff \
+ --from "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_${DATABASE_VERSION}?search_path=public&sslmode=disable" \
+ --to "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_dev?search_path=public&sslmode=disable")
+if [[ ${atlas_result} != *"Schemas are synced"* ]]; then
+ echo "================================================================================================"
+ echo " !!!!! For Contributors !!!!!"
+ echo "================================================================================================"
+ echo "Database schema not sync, please add below change in the latest version of dolphinscheduler-dao/src/main/resources/sql/upgrade directory"
+ echo "${atlas_result}"
+ exit 1
+else
+ echo "================================================================================================"
+ echo " !!!!! For Contributors !!!!!"
+ echo "================================================================================================"
+ echo "Database schema sync successfully"
+ exit 0
+fi
diff --git a/.github/workflows/schema-check/postgresql/start-job.sh b/.github/workflows/schema-check/postgresql/start-job.sh
new file mode 100644
index 0000000000..2d71794fe6
--- /dev/null
+++ b/.github/workflows/schema-check/postgresql/start-job.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+set -euox pipefail
+
+DS_VERSION=$1
+DATABASE_VERSION=${DS_VERSION//\./}
+
+# Install Atlas and Create Dir
+mkdir -p ds_schema_check_test/dev ds_schema_check_test/${DS_VERSION}
+curl -sSf https://atlasgo.sh | sh
+
+# Preparing the environment
+wget https://archive.apache.org/dist/dolphinscheduler/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz -P ds_schema_check_test/${DS_VERSION}
+tar -xzf ds_schema_check_test/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz -C ds_schema_check_test/${DS_VERSION} --strip-components 1
+tar -xzf ds_schema_check_test/dev/apache-dolphinscheduler-*-bin.tar.gz -C ds_schema_check_test/dev --strip-components 1
+
+if [[ $DATABASE_VERSION -lt 300 ]]; then
+ chmod +x ds_schema_check_test/dev/tools/bin/upgrade-schema.sh ds_schema_check_test/${DS_VERSION}/script/create-dolphinscheduler.sh
+else
+ chmod +x ds_schema_check_test/dev/tools/bin/upgrade-schema.sh ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh
+fi
+
+docker compose -f .github/workflows/schema-check/postgresql/docker-compose-base.yaml up -d --wait
+docker exec -i postgres psql -U postgres -c "create database dolphinscheduler_${DATABASE_VERSION}";
+
+#Running schema check tests
+/bin/bash .github/workflows/schema-check/postgresql/running-test.sh ${DS_VERSION} ${DATABASE_VERSION}
+
+#Cleanup
+docker compose -f .github/workflows/schema-check/postgresql/docker-compose-base.yaml down -v --remove-orphans
diff --git a/.github/workflows/unit-test.yml b/.github/workflows/unit-test.yml
index d5cd09d87f..c0bc86dbf0 100644
--- a/.github/workflows/unit-test.yml
+++ b/.github/workflows/unit-test.yml
@@ -100,8 +100,8 @@ jobs:
-Dsonar.projectKey=apache-dolphinscheduler
-Dsonar.login=e4058004bc6be89decf558ac819aa1ecbee57682
-Dsonar.exclusions=dolphinscheduler-ui/src/**/i18n/locale/*.js,dolphinscheduler-microbench/src/**/*
- -Dhttp.keepAlive=false
- -Dmaven.wagon.http.pool=false
+ -Dhttp.keepAlive=false
+ -Dmaven.wagon.http.pool=false
-Dmaven.wagon.httpconnectionManager.ttlSeconds=120
-DskipUT=true
env:
diff --git a/.gitignore b/.gitignore
index 174ab57242..fcf292d66e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -54,3 +54,4 @@ dolphinscheduler-worker/logs
dolphinscheduler-master/logs
dolphinscheduler-api/logs
__pycache__
+ds_schema_check_test
diff --git a/docs/docs/en/about/glossary.md b/docs/docs/en/about/glossary.md
index e3cee76f14..db4e875354 100644
--- a/docs/docs/en/about/glossary.md
+++ b/docs/docs/en/about/glossary.md
@@ -19,8 +19,8 @@ manual start or scheduled scheduling. Each time the process definition runs, a p
**Task instance**: The task instance is the instantiation of the task node in the process definition, which identifies
the specific task
-**Task type**: Currently supports SHELL, SQL, SUB_PROCESS (sub-process), PROCEDURE, MR, SPARK, PYTHON, DEPENDENT (
-depends), and plans to support dynamic plug-in expansion, note: **SUB_PROCESS** need relation with another workflow definition which also a separate process
+**Task type**: Currently supports SHELL, SQL, SUB_WORKFLOW, PROCEDURE, MR, SPARK, PYTHON, DEPENDENT (
+depends), and plans to support dynamic plug-in expansion, note: **SUB_WORKFLOW** needs to be related to another workflow definition, which is also a separate process
definition that can be started and executed separately
**Scheduling method**: The system supports scheduled scheduling and manual scheduling based on cron expressions. Command
diff --git a/docs/docs/en/architecture/task-structure.md b/docs/docs/en/architecture/task-structure.md
index 041d753403..c9430d5068 100644
--- a/docs/docs/en/architecture/task-structure.md
+++ b/docs/docs/en/architecture/task-structure.md
@@ -919,7 +919,7 @@ No.|parameter name||type|description |notes
```bash
{
- "type":"SUB_PROCESS",
+ "type":"SUB_WORKFLOW",
"id":"tasks-14806",
"name":"SubProcessTask",
"params":{
diff --git a/docs/docs/en/contribute/architecture-design.md b/docs/docs/en/contribute/architecture-design.md
index 837f49ea90..a7b325bd08 100644
--- a/docs/docs/en/contribute/architecture-design.md
+++ b/docs/docs/en/contribute/architecture-design.md
@@ -19,7 +19,7 @@ Before explaining the architecture of the schedule system, let us first understa
**Task instance**: A task instance is the instantiation of a specific task node when a process instance runs, which indicates the specific task execution status
-**Task type**: Currently supports SHELL, SQL, SUB_PROCESS (sub-process), PROCEDURE, MR, SPARK, PYTHON, DEPENDENT (dependency), and plans to support dynamic plug-in extension, note: the sub-**SUB_PROCESS** is also A separate process definition that can be launched separately
+**Task type**: Currently supports SHELL, SQL, SUB_WORKFLOW, PROCEDURE, MR, SPARK, PYTHON, DEPENDENT (dependency), and plans to support dynamic plug-in extension, note: **SUB_WORKFLOW** is also a separate process definition that can be launched separately
**Schedule mode** : The system supports timing schedule and manual schedule based on cron expressions. Command type support: start workflow, start execution from current node, resume fault-tolerant workflow, resume pause process, start execution from failed node, complement, timer, rerun, pause, stop, resume waiting thread. Where **recovers the fault-tolerant workflow** and **restores the waiting thread** The two command types are used by the scheduling internal control and cannot be called externally
diff --git a/docs/docs/en/guide/parameter/context.md b/docs/docs/en/guide/parameter/context.md
index 9d6131d92e..5291d91fa2 100644
--- a/docs/docs/en/guide/parameter/context.md
+++ b/docs/docs/en/guide/parameter/context.md
@@ -106,11 +106,11 @@ Save the subprocess_example1 workflow and set the global parameters var1.
![context-subprocess02](../../../../img/new_ui/dev/parameter/context-subprocess02.png)
-Create a sub_process task in a new workflow, and use the subprocess_example1 workflow as the sub-node.
+Create a sub_workflow task in a new workflow, and use the subprocess_example1 workflow as the sub-node.
![context-subprocess03](../../../../img/new_ui/dev/parameter/context-subprocess03.png)
-Create a shell task as a downstream task of the sub_process task, and write the following script:
+Create a shell task as a downstream task of the sub_workflow task, and write the following script:
![context-subprocess04](../../../../img/new_ui/dev/parameter/context-subprocess04.png)
diff --git a/docs/docs/en/guide/project/project-list.md b/docs/docs/en/guide/project/project-list.md
index 253930e34c..fb6b8ace8e 100644
--- a/docs/docs/en/guide/project/project-list.md
+++ b/docs/docs/en/guide/project/project-list.md
@@ -2,14 +2,14 @@
This page describes details regarding Project screen in Apache DolphinScheduler. Here, you will see all the functions which can be handled in this screen. The following table explains commonly used terms in Apache DolphinScheduler:
-| Glossary | description |
-|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| DAG | Tasks in a workflow are assembled in form of Directed Acyclic Graph (DAG). A topological traversal is performed from nodes with zero degrees of entry until there are no subsequent nodes. |
-| Workflow Definition | Visualization formed by dragging task nodes and establishing task node associations (DAG). |
-| Workflow Instance | Instantiation of the workflow definition, which can be generated by manual start or scheduled scheduling. Each time the process definition runs, a workflow instance is generated. |
-| Workflow Relation | Shows dynamic status of all the workflows in a project. |
-| Task | Task is a discrete action in a Workflow. Apache DolphinScheduler supports SHELL, SQL, SUB_PROCESS (sub-process), PROCEDURE, MR, SPARK, PYTHON, DEPENDENT ( depends), and plans to support dynamic plug-in expansion, (SUB_PROCESS). It is also a separate process definition that can be started and executed separately. |
-| Task Instance | Instantiation of the task node in the process definition, which identifies the specific task execution status. |
+| Glossary | description |
+|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| DAG | Tasks in a workflow are assembled in form of Directed Acyclic Graph (DAG). A topological traversal is performed from nodes with zero degrees of entry until there are no subsequent nodes. |
+| Workflow Definition | Visualization formed by dragging task nodes and establishing task node associations (DAG). |
+| Workflow Instance | Instantiation of the workflow definition, which can be generated by manual start or scheduled scheduling. Each time the process definition runs, a workflow instance is generated. |
+| Workflow Relation | Shows dynamic status of all the workflows in a project. |
+| Task | Task is a discrete action in a Workflow. Apache DolphinScheduler supports SHELL, SQL, SUB_WORKFLOW, PROCEDURE, MR, SPARK, PYTHON, DEPENDENT ( depends), and plans to support dynamic plug-in expansion, (SUB_WORKFLOW). It is also a separate process definition that can be started and executed separately. |
+| Task Instance | Instantiation of the task node in the process definition, which identifies the specific task execution status. |
## Project List
diff --git a/docs/docs/en/guide/resource/task-group.md b/docs/docs/en/guide/resource/task-group.md
index b66a4878b2..c21d02dbef 100644
--- a/docs/docs/en/guide/resource/task-group.md
+++ b/docs/docs/en/guide/resource/task-group.md
@@ -30,7 +30,7 @@ Click the button to view task group usage information:
### Use of Task Groups
-**Note**: The use of task groups is applicable to tasks executed by workers, such as `switch` nodes, `condition` nodes, `sub_process` and other node types executed by the master are not controlled by the task group.
+**Note**: The use of task groups is applicable to tasks executed by workers, such as `switch` nodes, `condition` nodes, `sub_workflow` and other node types executed by the master are not controlled by the task group.
Let's take the shell node as an example:
diff --git a/docs/docs/en/guide/task/sub-process.md b/docs/docs/en/guide/task/sub-process.md
index 8284dbf2d9..5f182e770b 100644
--- a/docs/docs/en/guide/task/sub-process.md
+++ b/docs/docs/en/guide/task/sub-process.md
@@ -7,7 +7,7 @@ The sub-process node is to execute an external workflow definition as a task nod
## Create Task
- Click `Project Management -> Project Name -> Workflow Definition`, and click the `Create Workflow` button to enter the DAG editing page.
-- Drag from the toolbar task node to canvas to create a new SubProcess task.
+- Drag from the toolbar task node to canvas to create a new SubProcess task.
## Task Parameter
@@ -30,16 +30,16 @@ Create a shell task to print "hello" and define the workflow as `test_dag01`.
![subprocess_task01](../../../../img/tasks/demo/subprocess_task01.png)
-## Create the Sub_process task
+## Create the Sub_workflow task
-To use the sub_process, you need to create the sub-node task, which is the workflow `test_dag01` we created in the first step. After that, as shown in the diagram below, select the corresponding sub-node in position ⑤.
+To use the sub_workflow, you need to create the sub-node task, which is the workflow `test_dag01` we created in the first step. After that, as shown in the diagram below, select the corresponding sub-node in position ⑤.
![subprocess_task02](../../../../img/tasks/demo/subprocess_task02.png)
-After creating the sub_process, create a corresponding shell task for printing "world" and link both together. Save the current workflow and run it to get the expected result.
+After creating the sub_workflow, create a corresponding shell task for printing "world" and link both together. Save the current workflow and run it to get the expected result.
![subprocess_task03](../../../../img/tasks/demo/subprocess_task03.png)
## Note
-When using `sub_process` to recall a sub-node task, you need to ensure that the defined sub-node is online status, otherwise, the sub_process workflow will not work properly.
+When using `sub_workflow` to call a sub-node task, you don't need to ensure that the defined sub-node workflow is in online status.
diff --git a/docs/docs/en/guide/upgrade/incompatible.md b/docs/docs/en/guide/upgrade/incompatible.md
index d20dba51ba..62852e2463 100644
--- a/docs/docs/en/guide/upgrade/incompatible.md
+++ b/docs/docs/en/guide/upgrade/incompatible.md
@@ -30,4 +30,6 @@ This document records the incompatible updates between each version. You need to
* Remove the `udf-manage` function from the `resource center` ([#16209])
* Remove the `Pigeon` from the `Task Plugin` ([#16218])
+* Uniformly name `process` in code as `workflow` ([#16515])
+* Deprecated upgrade code of 1.x and 2.x in 3.3.0-release ([#16543])
diff --git a/docs/docs/en/guide/upgrade/upgrade.md b/docs/docs/en/guide/upgrade/upgrade.md
index 9eb2987170..074b1ca442 100644
--- a/docs/docs/en/guide/upgrade/upgrade.md
+++ b/docs/docs/en/guide/upgrade/upgrade.md
@@ -60,7 +60,7 @@ Execute script: `sh ./tools/bin/migrate-lineage.sh`.
Execution result:
-- Migrate lineage data to new table `t_ds_process_task_lineage`.
+- Migrate lineage data to new table `t_ds_workflow_task_lineage`.
- This script only performs upsert operations, not deletes. You can delete it manually if you need to.
### Upgrade Service
diff --git a/docs/docs/zh/about/glossary.md b/docs/docs/zh/about/glossary.md
index 2b5876669e..009d412276 100644
--- a/docs/docs/zh/about/glossary.md
+++ b/docs/docs/zh/about/glossary.md
@@ -14,7 +14,7 @@
**任务实例**:任务实例是流程定义中任务节点的实例化,标识着某个具体的任务
-**任务类型**:目前支持有 SHELL、SQL、SUB_PROCESS(子流程)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中 **SUB_PROCESS**类型的任务需要关联另外一个流程定义,被关联的流程定义是可以单独启动执行的
+**任务类型**:目前支持有 SHELL、SQL、SUB_WORKFLOW(子工作流)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中 **SUB_WORKFLOW**类型的任务需要关联另外一个流程定义,被关联的流程定义是可以单独启动执行的
**调度方式**:系统支持基于 cron 表达式的定时调度和手动调度。命令类型支持:启动工作流、从当前节点开始执行、恢复被容错的工作流、恢复暂停流程、从失败节点开始执行、补数、定时、重跑、暂停、停止、恢复等待线程。
其中 **恢复被容错的工作流** 和 **恢复等待线程** 两种命令类型是由调度内部控制使用,外部无法调用
diff --git a/docs/docs/zh/architecture/task-structure.md b/docs/docs/zh/architecture/task-structure.md
index 656e57eed7..b9177add37 100644
--- a/docs/docs/zh/architecture/task-structure.md
+++ b/docs/docs/zh/architecture/task-structure.md
@@ -918,7 +918,7 @@
```bash
{
- "type":"SUB_PROCESS",
+ "type":"SUB_WORKFLOW",
"id":"tasks-14806",
"name":"SubProcessTask",
"params":{
diff --git a/docs/docs/zh/contribute/architecture-design.md b/docs/docs/zh/contribute/architecture-design.md
index 091dff8b07..9d82714327 100644
--- a/docs/docs/zh/contribute/architecture-design.md
+++ b/docs/docs/zh/contribute/architecture-design.md
@@ -19,7 +19,7 @@
**任务实例**:任务实例是流程定义中任务节点的实例化,标识着具体的任务执行状态
-**任务类型**: 目前支持有 SHELL、SQL、SUB_PROCESS(子流程)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中子 **SUB_PROCESS** 也是一个单独的流程定义,是可以单独启动执行的
+**任务类型**: 目前支持有 SHELL、SQL、SUB_WORKFLOW(子工作流)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中子 **SUB_WORKFLOW** 也是一个单独的流程定义,是可以单独启动执行的
**调度方式:** 系统支持基于 cron 表达式的定时调度和手动调度。命令类型支持:启动工作流、从当前节点开始执行、恢复被容错的工作流、恢复暂停流程、从失败节点开始执行、补数、定时、重跑、暂停、停止、恢复等待线程。其中 **恢复被容错的工作流** 和 **恢复等待线程** 两种命令类型是由调度内部控制使用,外部无法调用
diff --git a/docs/docs/zh/guide/parameter/context.md b/docs/docs/zh/guide/parameter/context.md
index 7f5870458d..51b1cfcff4 100644
--- a/docs/docs/zh/guide/parameter/context.md
+++ b/docs/docs/zh/guide/parameter/context.md
@@ -105,11 +105,11 @@ Node_mysql 运行结果如下:
![context-subprocess02](../../../../img/new_ui/dev/parameter/context-subprocess02.png)
-在新的工作流中创建 sub_process 任务,使用 subprocess_example1 工作流作为子节点。
+在新的工作流中创建 sub_workflow 任务,使用 subprocess_example1 工作流作为子节点。
![context-subprocess03](../../../../img/new_ui/dev/parameter/context-subprocess03.png)
-创建一个 shell 任务作为 sub_process 任务的下游任务,并编写如下脚本:
+创建一个 shell 任务作为 sub_workflow 任务的下游任务,并编写如下脚本:
![context-subprocess04](../../../../img/new_ui/dev/parameter/context-subprocess04.png)
diff --git a/docs/docs/zh/guide/resource/task-group.md b/docs/docs/zh/guide/resource/task-group.md
index 58b46e26d5..84833fb282 100644
--- a/docs/docs/zh/guide/resource/task-group.md
+++ b/docs/docs/zh/guide/resource/task-group.md
@@ -32,7 +32,7 @@
#### 任务组的使用
-注:任务组的使用适用于由 worker 执行的任务,例如【switch】节点、【condition】节点、【sub_process】等由 master 负责执行的节点类型不受任务组控制。
+注:任务组的使用适用于由 worker 执行的任务,例如【switch】节点、【condition】节点、【sub_workflow】等由 master 负责执行的节点类型不受任务组控制。
我们以 shell 节点为例:
diff --git a/docs/docs/zh/guide/task/sub-process.md b/docs/docs/zh/guide/task/sub-process.md
index 8095e4b4a3..2fb091c7e0 100644
--- a/docs/docs/zh/guide/task/sub-process.md
+++ b/docs/docs/zh/guide/task/sub-process.md
@@ -8,7 +8,7 @@
- 点击项目管理 -> 项目名称 -> 工作流定义,点击”创建工作流”按钮,进入 DAG 编辑页面:
-- 拖动工具栏的 任务节点到画板中。
+- 拖动工具栏的 任务节点到画板中。
## 任务参数
@@ -31,16 +31,16 @@
![subprocess_task01](../../../../img/tasks/demo/subprocess_task01.png)
-### 创建 sub_process 任务
+### 创建 sub_workflow 任务
-在使用 sub_process 的过程中,需要创建所需的子结点任务,也就是我们第一步所创建的 test_dag01 工作流。然后如下图所示,在 ⑤ 的位置选择对应的子结点即可。
+在使用 sub_workflow 的过程中,需要创建所需的子结点任务,也就是我们第一步所创建的 test_dag01 工作流。然后如下图所示,在 ⑤ 的位置选择对应的子结点即可。
![subprocess_task02](../../../../img/tasks/demo/subprocess_task02.png)
-创建 sub_process 完成之后,再创建一个对应的 shell 任务,用于打印 “world”,并将二者连接起来。保存当前工作流,并上线运行,即可得到想要的结果。
+创建 sub_workflow 完成之后,再创建一个对应的 shell 任务,用于打印 “world”,并将二者连接起来。保存当前工作流,并上线运行,即可得到想要的结果。
![subprocess_task03](../../../../img/tasks/demo/subprocess_task03.png)
## 注意事项
-在使用 sub_process 调用子结点任务的时候,需要保证定义的子结点为上线状态,否则 sub_process 的工作流无法正常运行。
+在使用 sub_workflow 调用子结点任务的时候,无需保证所引用的子结点工作流处于上线状态。
diff --git a/docs/docs/zh/guide/upgrade/incompatible.md b/docs/docs/zh/guide/upgrade/incompatible.md
index a5260a0695..9dd689f0ae 100644
--- a/docs/docs/zh/guide/upgrade/incompatible.md
+++ b/docs/docs/zh/guide/upgrade/incompatible.md
@@ -28,4 +28,6 @@
* 从 `资源中心` 中移除了 `udf-manage` 功能 ([#16209])
* 从 `任务插件` 中移除了 `Pigeon` 类型 ([#16218])
+* 统一代码中的 `process` 为 `workflow` ([#16515])
+* 在 3.3.0-release 中废弃了从 1.x 至 2.x 的升级代码 ([#16543])
diff --git a/docs/docs/zh/guide/upgrade/upgrade.md b/docs/docs/zh/guide/upgrade/upgrade.md
index 4866458c04..c2cec6783a 100644
--- a/docs/docs/zh/guide/upgrade/upgrade.md
+++ b/docs/docs/zh/guide/upgrade/upgrade.md
@@ -59,7 +59,7 @@ jar 包 并添加到 `./tools/libs` 目录下,设置以下环境变量
执行结果:
-- 原血缘数据迁移至新血缘表 `t_ds_process_task_lineage`。
+- 原血缘数据迁移至新血缘表 `t_ds_workflow_task_lineage`。
- 此脚本仅执行 upsert 操作,不执行删除操作,如果需要删除,您可以手动删除。
### 服务升级
diff --git a/docs/img/tasks/icons/sub_process.png b/docs/img/tasks/icons/sub_workflow.png
similarity index 100%
rename from docs/img/tasks/icons/sub_process.png
rename to docs/img/tasks/icons/sub_workflow.png
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java
index 55639bd20f..303aa8ac5a 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java
@@ -25,7 +25,7 @@ import org.apache.dolphinscheduler.api.test.entity.LoginResponseData;
import org.apache.dolphinscheduler.api.test.pages.LoginPage;
import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.ExecutorPage;
-import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage;
+import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage;
import org.apache.dolphinscheduler.api.test.utils.JSONUtils;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
@@ -50,7 +50,7 @@ import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
-//TODO: Some test cases rely on ProcessInstance APIs. Should complete remaining cases after ProcessInstance related API tests done.
+//TODO: Some test cases rely on WorkflowInstance APIs. Should complete the remaining cases after the WorkflowInstance-related API tests are done.
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
public class ExecutorAPITest {
@@ -65,13 +65,13 @@ public class ExecutorAPITest {
private static ExecutorPage executorPage;
- private static ProcessDefinitionPage processDefinitionPage;
+ private static WorkflowDefinitionPage workflowDefinitionPage;
private static ProjectPage projectPage;
private static long projectCode;
- private static long processDefinitionCode;
+ private static long workflowDefinitionCode;
private static List workflowInstanceIds;
@@ -82,7 +82,7 @@ public class ExecutorAPITest {
sessionId =
JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId();
executorPage = new ExecutorPage(sessionId);
- processDefinitionPage = new ProcessDefinitionPage(sessionId);
+ workflowDefinitionPage = new WorkflowDefinitionPage(sessionId);
projectPage = new ProjectPage(sessionId);
loginUser = new User();
loginUser.setUserName("admin");
@@ -97,7 +97,7 @@ public class ExecutorAPITest {
@Test
@Order(1)
- public void testStartProcessInstance() {
+ public void testStartWorkflowInstance() {
try {
// create test project
HttpResponse createProjectResponse = projectPage.createProject(loginUser, "project-test");
@@ -109,36 +109,36 @@ public class ExecutorAPITest {
// upload test workflow definition json
ClassLoader classLoader = getClass().getClassLoader();
File file = new File(classLoader.getResource("workflow-json/test.json").getFile());
- CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage
- .importProcessDefinition(loginUser, projectCode, file);
- String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity());
+ CloseableHttpResponse importWorkflowDefinitionResponse = workflowDefinitionPage
+ .importWorkflowDefinition(loginUser, projectCode, file);
+ String data = EntityUtils.toString(importWorkflowDefinitionResponse.getEntity());
Assertions.assertTrue(data.contains("\"success\":true"));
// get workflow definition code
- HttpResponse queryAllProcessDefinitionByProjectCodeResponse =
- processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode);
- Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess());
- Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString()
+ HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse =
+ workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode);
+ Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess());
+ Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getData().toString()
.contains("hello world"));
- processDefinitionCode =
- (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse
- .getBody().getData()).get(0)).get("processDefinition")).get("code");
+ workflowDefinitionCode =
+ (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllWorkflowDefinitionByProjectCodeResponse
+ .getBody().getData()).get(0)).get("workflowDefinition")).get("code");
// release test workflow
- HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser,
- projectCode, processDefinitionCode, ReleaseState.ONLINE);
- Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess());
+ HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser,
+ projectCode, workflowDefinitionCode, ReleaseState.ONLINE);
+ Assertions.assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess());
// trigger workflow instance
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Date date = new Date();
String scheduleTime = String.format("%s,%s", formatter.format(date), formatter.format(date));
log.info("use current time {} as scheduleTime", scheduleTime);
- HttpResponse startProcessInstanceResponse = executorPage.startProcessInstance(loginUser, projectCode,
- processDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE);
- Assertions.assertTrue(startProcessInstanceResponse.getBody().getSuccess());
+ HttpResponse startWorkflowInstanceResponse = executorPage.startWorkflowInstance(loginUser, projectCode,
+ workflowDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE);
+ Assertions.assertTrue(startWorkflowInstanceResponse.getBody().getSuccess());
- workflowInstanceIds = (List) startProcessInstanceResponse.getBody().getData();
+ workflowInstanceIds = (List) startWorkflowInstanceResponse.getBody().getData();
} catch (Exception e) {
log.error("failed", e);
Assertions.fail();
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java
index cf5621f06c..25e4d09042 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java
@@ -39,9 +39,11 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
+import org.junitpioneer.jupiter.DisableIfTestFails;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
+@DisableIfTestFails
// TODO: Add more detailed permission control related cases after userPage test cases completed
public class ProjectAPITest {
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java
index 9d02acfd28..715b73e849 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java
@@ -24,8 +24,8 @@ import org.apache.dolphinscheduler.api.test.entity.HttpResponse;
import org.apache.dolphinscheduler.api.test.entity.LoginResponseData;
import org.apache.dolphinscheduler.api.test.pages.LoginPage;
import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage;
-import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.SchedulerPage;
+import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage;
import org.apache.dolphinscheduler.api.test.utils.JSONUtils;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
@@ -42,9 +42,11 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
+import org.junitpioneer.jupiter.DisableIfTestFails;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
+@DisableIfTestFails
public class SchedulerAPITest {
private static final String username = "admin";
@@ -57,13 +59,13 @@ public class SchedulerAPITest {
private static SchedulerPage schedulerPage;
- private static ProcessDefinitionPage processDefinitionPage;
+ private static WorkflowDefinitionPage workflowDefinitionPage;
private static ProjectPage projectPage;
private static long projectCode;
- private static long processDefinitionCode;
+ private static long workflowDefinitionCode;
private static int scheduleId;
@@ -75,7 +77,7 @@ public class SchedulerAPITest {
JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId();
projectPage = new ProjectPage(sessionId);
schedulerPage = new SchedulerPage(sessionId);
- processDefinitionPage = new ProcessDefinitionPage(sessionId);
+ workflowDefinitionPage = new WorkflowDefinitionPage(sessionId);
loginUser = new User();
loginUser.setUserName("admin");
loginUser.setId(1);
@@ -98,20 +100,20 @@ public class SchedulerAPITest {
.getBody().getData()).get(0)).get("code");
ClassLoader classLoader = getClass().getClassLoader();
File file = new File(classLoader.getResource("workflow-json/test.json").getFile());
- processDefinitionPage.importProcessDefinition(loginUser, projectCode, file);
- HttpResponse queryAllProcessDefinitionByProjectCodeResponse =
- processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode);
- Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess());
- processDefinitionCode =
- (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse
- .getBody().getData()).get(0)).get("processDefinition")).get("code");
-
- processDefinitionPage.releaseProcessDefinition(loginUser, projectCode, processDefinitionCode,
+ workflowDefinitionPage.importWorkflowDefinition(loginUser, projectCode, file);
+ HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse =
+ workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode);
+ Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess());
+ workflowDefinitionCode =
+ (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllWorkflowDefinitionByProjectCodeResponse
+ .getBody().getData()).get(0)).get("workflowDefinition")).get("code");
+
+ workflowDefinitionPage.releaseWorkflowDefinition(loginUser, projectCode, workflowDefinitionCode,
ReleaseState.ONLINE);
final String schedule =
"{\"startTime\":\"2019-08-08 00:00:00\",\"endTime\":\"2100-08-08 00:00:00\",\"timezoneId\":\"America/Phoenix\",\"crontab\":\"0 0 3/6 * * ? *\"}";
HttpResponse createScheduleResponse =
- schedulerPage.createSchedule(loginUser, projectCode, processDefinitionCode, schedule);
+ schedulerPage.createSchedule(loginUser, projectCode, workflowDefinitionCode, schedule);
Assertions.assertTrue(createScheduleResponse.getBody().getSuccess());
Assertions.assertTrue(createScheduleResponse.getBody().getData().toString().contains("2019-08-08"));
}
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java
index 2f8e6aa056..9f8407a469 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java
@@ -35,9 +35,11 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
+import org.junitpioneer.jupiter.DisableIfTestFails;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
+@DisableIfTestFails
public class TenantAPITest {
private static final String tenant = System.getProperty("user.name");
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java
index d34f6bad2d..05e6e1e8dc 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java
@@ -40,9 +40,11 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
+import org.junitpioneer.jupiter.DisableIfTestFails;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
+@DisableIfTestFails
public class WorkerGroupAPITest {
private static final String username = "admin";
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowDefinitionAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowDefinitionAPITest.java
index ab0463d3ed..2408753e41 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowDefinitionAPITest.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowDefinitionAPITest.java
@@ -24,7 +24,7 @@ import org.apache.dolphinscheduler.api.test.entity.HttpResponse;
import org.apache.dolphinscheduler.api.test.entity.LoginResponseData;
import org.apache.dolphinscheduler.api.test.pages.LoginPage;
import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage;
-import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage;
+import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage;
import org.apache.dolphinscheduler.api.test.utils.JSONUtils;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
@@ -44,9 +44,11 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
+import org.junitpioneer.jupiter.DisableIfTestFails;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
+@DisableIfTestFails
public class WorkflowDefinitionAPITest {
private static final String username = "admin";
@@ -57,15 +59,15 @@ public class WorkflowDefinitionAPITest {
private static User loginUser;
- private static ProcessDefinitionPage processDefinitionPage;
+ private static WorkflowDefinitionPage workflowDefinitionPage;
private static ProjectPage projectPage;
private static long projectCode;
- private static long processDefinitionCode;
+ private static long workflowDefinitionCode;
- private static String processDefinitionName;
+ private static String workflowDefinitionName;
@BeforeAll
public static void setup() {
@@ -73,7 +75,7 @@ public class WorkflowDefinitionAPITest {
HttpResponse loginHttpResponse = loginPage.login(username, password);
sessionId =
JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId();
- processDefinitionPage = new ProcessDefinitionPage(sessionId);
+ workflowDefinitionPage = new WorkflowDefinitionPage(sessionId);
projectPage = new ProjectPage(sessionId);
loginUser = new User();
loginUser.setId(123);
@@ -87,7 +89,7 @@ public class WorkflowDefinitionAPITest {
@Test
@Order(1)
- public void testImportProcessDefinition() {
+ public void testImportWorkflowDefinition() {
try {
HttpResponse createProjectResponse = projectPage.createProject(loginUser, "project-test");
HttpResponse queryAllProjectListResponse = projectPage.queryAllProjectList(loginUser);
@@ -97,9 +99,9 @@ public class WorkflowDefinitionAPITest {
.getBody().getData()).get(0)).get("code");
ClassLoader classLoader = getClass().getClassLoader();
File file = new File(classLoader.getResource("workflow-json/test.json").getFile());
- CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage
- .importProcessDefinition(loginUser, projectCode, file);
- String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity());
+ CloseableHttpResponse importWorkflowDefinitionResponse = workflowDefinitionPage
+ .importWorkflowDefinition(loginUser, projectCode, file);
+ String data = EntityUtils.toString(importWorkflowDefinitionResponse.getEntity());
Assertions.assertTrue(data.contains("\"success\":true"));
} catch (Exception e) {
log.error("failed", e);
@@ -109,93 +111,95 @@ public class WorkflowDefinitionAPITest {
@Test
@Order(2)
- public void testQueryAllProcessDefinitionByProjectCode() {
- HttpResponse queryAllProcessDefinitionByProjectCodeResponse =
- processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode);
- Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess());
+ public void testQueryAllWorkflowDefinitionByProjectCode() {
+ HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse =
+ workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode);
+ Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess());
Assertions.assertTrue(
- queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString().contains("hello world"));
- processDefinitionCode =
- (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse
- .getBody().getData()).get(0)).get("processDefinition")).get("code");
- processDefinitionName =
- (String) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse
- .getBody().getData()).get(0)).get("processDefinition")).get("name");
+ queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getData().toString().contains("hello world"));
+ workflowDefinitionCode =
+ (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllWorkflowDefinitionByProjectCodeResponse
+ .getBody().getData()).get(0)).get("workflowDefinition")).get("code");
+ workflowDefinitionName =
+ (String) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllWorkflowDefinitionByProjectCodeResponse
+ .getBody().getData()).get(0)).get("workflowDefinition")).get("name");
}
@Test
@Order(3)
- public void testQueryProcessDefinitionByCode() {
- HttpResponse queryProcessDefinitionByCodeResponse =
- processDefinitionPage.queryProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode);
- Assertions.assertTrue(queryProcessDefinitionByCodeResponse.getBody().getSuccess());
+ public void testQueryWorkflowDefinitionByCode() {
+ HttpResponse queryWorkflowDefinitionByCodeResponse =
+ workflowDefinitionPage.queryWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode);
+ Assertions.assertTrue(queryWorkflowDefinitionByCodeResponse.getBody().getSuccess());
Assertions.assertTrue(
- queryProcessDefinitionByCodeResponse.getBody().getData().toString().contains("hello world"));
+ queryWorkflowDefinitionByCodeResponse.getBody().getData().toString().contains("hello world"));
}
@Test
@Order(4)
- public void testgetProcessListByProjectCode() {
- HttpResponse getProcessListByProjectCodeResponse =
- processDefinitionPage.getProcessListByProjectCode(loginUser, projectCode);
- Assertions.assertTrue(getProcessListByProjectCodeResponse.getBody().getSuccess());
+ public void testGetWorkflowListByProjectCode() {
+ HttpResponse getWorkflowListByProjectCodeResponse =
+ workflowDefinitionPage.getWorkflowListByProjectCode(loginUser, projectCode);
+ Assertions.assertTrue(getWorkflowListByProjectCodeResponse.getBody().getSuccess());
Assertions
- .assertTrue(getProcessListByProjectCodeResponse.getBody().getData().toString().contains("test_import"));
+ .assertTrue(
+ getWorkflowListByProjectCodeResponse.getBody().getData().toString().contains("test_import"));
}
@Test
@Order(5)
- public void testQueryProcessDefinitionByName() {
- HttpResponse queryProcessDefinitionByNameResponse =
- processDefinitionPage.queryProcessDefinitionByName(loginUser, projectCode, processDefinitionName);
- Assertions.assertTrue(queryProcessDefinitionByNameResponse.getBody().getSuccess());
+ public void testQueryWorkflowDefinitionByName() {
+ HttpResponse queryWorkflowDefinitionByNameResponse =
+ workflowDefinitionPage.queryWorkflowDefinitionByName(loginUser, projectCode, workflowDefinitionName);
+ Assertions.assertTrue(queryWorkflowDefinitionByNameResponse.getBody().getSuccess());
Assertions.assertTrue(
- queryProcessDefinitionByNameResponse.getBody().getData().toString().contains("hello world"));
+ queryWorkflowDefinitionByNameResponse.getBody().getData().toString().contains("hello world"));
}
@Test
@Order(6)
- public void testQueryProcessDefinitionList() {
- HttpResponse queryProcessDefinitionListResponse =
- processDefinitionPage.queryProcessDefinitionList(loginUser, projectCode);
- Assertions.assertTrue(queryProcessDefinitionListResponse.getBody().getSuccess());
+ public void testQueryWorkflowDefinitionList() {
+ HttpResponse queryWorkflowDefinitionListResponse =
+ workflowDefinitionPage.queryWorkflowDefinitionList(loginUser, projectCode);
+ Assertions.assertTrue(queryWorkflowDefinitionListResponse.getBody().getSuccess());
Assertions
- .assertTrue(queryProcessDefinitionListResponse.getBody().getData().toString().contains("hello world"));
+ .assertTrue(queryWorkflowDefinitionListResponse.getBody().getData().toString().contains("hello world"));
}
@Test
@Order(7)
- public void testReleaseProcessDefinition() {
- HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser,
- projectCode, processDefinitionCode, ReleaseState.ONLINE);
- Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess());
-
- HttpResponse queryProcessDefinitionByCodeResponse =
- processDefinitionPage.queryProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode);
- Assertions.assertTrue(queryProcessDefinitionByCodeResponse.getBody().getSuccess());
+ public void testReleaseWorkflowDefinition() {
+ HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser,
+ projectCode, workflowDefinitionCode, ReleaseState.ONLINE);
+ Assertions.assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess());
+
+ HttpResponse queryWorkflowDefinitionByCodeResponse =
+ workflowDefinitionPage.queryWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode);
+ Assertions.assertTrue(queryWorkflowDefinitionByCodeResponse.getBody().getSuccess());
Assertions.assertTrue(
- queryProcessDefinitionByCodeResponse.getBody().getData().toString().contains("releaseState=ONLINE"));
+ queryWorkflowDefinitionByCodeResponse.getBody().getData().toString().contains("releaseState=ONLINE"));
}
@Test
@Order(8)
- public void testDeleteProcessDefinitionByCode() {
- HttpResponse deleteProcessDefinitionByCodeResponse =
- processDefinitionPage.deleteProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode);
- Assertions.assertFalse(deleteProcessDefinitionByCodeResponse.getBody().getSuccess());
-
- HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser,
- projectCode, processDefinitionCode, ReleaseState.OFFLINE);
- Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess());
-
- deleteProcessDefinitionByCodeResponse =
- processDefinitionPage.deleteProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode);
- Assertions.assertTrue(deleteProcessDefinitionByCodeResponse.getBody().getSuccess());
-
- HttpResponse queryProcessDefinitionListResponse =
- processDefinitionPage.queryProcessDefinitionList(loginUser, projectCode);
- Assertions.assertTrue(queryProcessDefinitionListResponse.getBody().getSuccess());
+ public void testDeleteWorkflowDefinitionByCode() {
+ HttpResponse deleteWorkflowDefinitionByCodeResponse =
+ workflowDefinitionPage.deleteWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode);
+ Assertions.assertFalse(deleteWorkflowDefinitionByCodeResponse.getBody().getSuccess());
+
+ HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser,
+ projectCode, workflowDefinitionCode, ReleaseState.OFFLINE);
+ Assertions.assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess());
+
+ deleteWorkflowDefinitionByCodeResponse =
+ workflowDefinitionPage.deleteWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode);
+ Assertions.assertTrue(deleteWorkflowDefinitionByCodeResponse.getBody().getSuccess());
+
+ HttpResponse queryWorkflowDefinitionListResponse =
+ workflowDefinitionPage.queryWorkflowDefinitionList(loginUser, projectCode);
+ Assertions.assertTrue(queryWorkflowDefinitionListResponse.getBody().getSuccess());
Assertions
- .assertFalse(queryProcessDefinitionListResponse.getBody().getData().toString().contains("hello world"));
+ .assertFalse(
+ queryWorkflowDefinitionListResponse.getBody().getData().toString().contains("hello world"));
}
}
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowInstanceAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowInstanceAPITest.java
index 35523f5e44..1025292b81 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowInstanceAPITest.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowInstanceAPITest.java
@@ -28,8 +28,8 @@ import org.apache.dolphinscheduler.api.test.entity.LoginResponseData;
import org.apache.dolphinscheduler.api.test.pages.LoginPage;
import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.ExecutorPage;
-import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage;
-import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessInstancePage;
+import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage;
+import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowInstancePage;
import org.apache.dolphinscheduler.api.test.utils.JSONUtils;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
@@ -55,10 +55,12 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
+import org.junitpioneer.jupiter.DisableIfTestFails;
import org.testcontainers.shaded.org.awaitility.Awaitility;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
+@DisableIfTestFails
public class WorkflowInstanceAPITest {
private static final String username = "admin";
@@ -69,19 +71,19 @@ public class WorkflowInstanceAPITest {
private static User loginUser;
- private static ProcessInstancePage processInstancePage;
+ private static WorkflowInstancePage workflowInstancePage;
private static ExecutorPage executorPage;
- private static ProcessDefinitionPage processDefinitionPage;
+ private static WorkflowDefinitionPage workflowDefinitionPage;
private static ProjectPage projectPage;
private static long projectCode;
- private static long processDefinitionCode;
+ private static long workflowDefinitionCode;
- private static int processInstanceId;
+ private static int workflowInstanceId;
@BeforeAll
public static void setup() {
@@ -89,9 +91,9 @@ public class WorkflowInstanceAPITest {
HttpResponse loginHttpResponse = loginPage.login(username, password);
sessionId =
JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId();
- processInstancePage = new ProcessInstancePage(sessionId);
+ workflowInstancePage = new WorkflowInstancePage(sessionId);
executorPage = new ExecutorPage(sessionId);
- processDefinitionPage = new ProcessDefinitionPage(sessionId);
+ workflowDefinitionPage = new WorkflowDefinitionPage(sessionId);
projectPage = new ProjectPage(sessionId);
loginUser = new User();
loginUser.setUserName("admin");
@@ -106,7 +108,7 @@ public class WorkflowInstanceAPITest {
@Test
@Order(1)
- public void testQueryProcessInstancesByWorkflowInstanceId() {
+ public void testQueryWorkflowInstancesByWorkflowInstanceId() {
try {
// create test project
HttpResponse createProjectResponse = projectPage.createProject(loginUser, "project-test");
@@ -118,49 +120,50 @@ public class WorkflowInstanceAPITest {
// upload test workflow definition json
ClassLoader classLoader = getClass().getClassLoader();
File file = new File(classLoader.getResource("workflow-json/test.json").getFile());
- CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage
- .importProcessDefinition(loginUser, projectCode, file);
- String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity());
+ CloseableHttpResponse importWorkflowDefinitionResponse = workflowDefinitionPage
+ .importWorkflowDefinition(loginUser, projectCode, file);
+ String data = EntityUtils.toString(importWorkflowDefinitionResponse.getEntity());
assertTrue(data.contains("\"success\":true"));
// get workflow definition code
- HttpResponse queryAllProcessDefinitionByProjectCodeResponse =
- processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode);
- assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess());
- assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString()
+ HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse =
+ workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode);
+ assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess());
+ assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getData().toString()
.contains("hello world"));
- processDefinitionCode =
- (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse
- .getBody().getData()).get(0)).get("processDefinition")).get("code");
+ workflowDefinitionCode =
+ (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllWorkflowDefinitionByProjectCodeResponse
+ .getBody().getData()).get(0)).get("workflowDefinition")).get("code");
// release test workflow
- HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser,
- projectCode, processDefinitionCode, ReleaseState.ONLINE);
- assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess());
+ HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser,
+ projectCode, workflowDefinitionCode, ReleaseState.ONLINE);
+ assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess());
// trigger workflow instance
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Date date = new Date();
String scheduleTime = String.format("%s,%s", formatter.format(date), formatter.format(date));
log.info("use current time {} as scheduleTime", scheduleTime);
- HttpResponse startProcessInstanceResponse = executorPage.startProcessInstance(loginUser, projectCode,
- processDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE);
- assertTrue(startProcessInstanceResponse.getBody().getSuccess());
- final List workflowInstanceIds = (List) startProcessInstanceResponse.getBody().getData();
+ HttpResponse startWorkflowInstanceResponse = executorPage.startWorkflowInstance(loginUser, projectCode,
+ workflowDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE);
+ assertTrue(startWorkflowInstanceResponse.getBody().getSuccess());
+ final List workflowInstanceIds = (List) startWorkflowInstanceResponse.getBody().getData();
assertEquals(1, workflowInstanceIds.size());
- processInstanceId = workflowInstanceIds.get(0);
+ workflowInstanceId = workflowInstanceIds.get(0);
- // make sure process instance has completed and successfully persisted into db
+ // make sure workflow instance has completed and successfully persisted into db
Awaitility.await()
.atMost(30, TimeUnit.SECONDS)
.untilAsserted(() -> {
// query workflow instance by trigger code
- HttpResponse queryProcessInstanceListResponse =
- processInstancePage.queryProcessInstanceById(loginUser, projectCode, processInstanceId);
- assertTrue(queryProcessInstanceListResponse.getBody().getSuccess());
+ HttpResponse queryWorkflowInstanceListResponse =
+ workflowInstancePage.queryWorkflowInstanceById(loginUser, projectCode,
+ workflowInstanceId);
+ assertTrue(queryWorkflowInstanceListResponse.getBody().getSuccess());
final Map workflowInstance =
- (Map) queryProcessInstanceListResponse.getBody().getData();
+ (Map) queryWorkflowInstanceListResponse.getBody().getData();
assertEquals("SUCCESS", workflowInstance.get("state"));
});
} catch (Exception e) {
@@ -171,42 +174,43 @@ public class WorkflowInstanceAPITest {
@Test
@Order(2)
- public void testQueryProcessInstanceList() {
- HttpResponse queryProcessInstanceListResponse =
- processInstancePage.queryProcessInstanceList(loginUser, projectCode, 1, 10);
- assertTrue(queryProcessInstanceListResponse.getBody().getSuccess());
- assertTrue(queryProcessInstanceListResponse.getBody().getData().toString().contains("test_import"));
+ public void testQueryWorkflowInstanceList() {
+ HttpResponse queryWorkflowInstanceListResponse =
+ workflowInstancePage.queryWorkflowInstanceList(loginUser, projectCode, 1, 10);
+ assertTrue(queryWorkflowInstanceListResponse.getBody().getSuccess());
+ assertTrue(queryWorkflowInstanceListResponse.getBody().getData().toString().contains("test_import"));
}
@Test
@Order(3)
- public void testQueryTaskListByProcessId() {
- HttpResponse queryTaskListByProcessIdResponse =
- processInstancePage.queryTaskListByProcessId(loginUser, projectCode, processInstanceId);
- assertTrue(queryTaskListByProcessIdResponse.getBody().getSuccess());
- assertTrue(queryTaskListByProcessIdResponse.getBody().getData().toString().contains("test_import"));
+ public void testQueryTaskListByWorkflowInstanceId() {
+ HttpResponse queryTaskListByWorkflowInstanceIdResponse =
+ workflowInstancePage.queryTaskListByWorkflowInstanceId(loginUser, projectCode, workflowInstanceId);
+ assertTrue(queryTaskListByWorkflowInstanceIdResponse.getBody().getSuccess());
+ assertTrue(queryTaskListByWorkflowInstanceIdResponse.getBody().getData().toString().contains("test_import"));
}
@Test
@Order(4)
- public void testQueryProcessInstanceById() {
- HttpResponse queryProcessInstanceByIdResponse =
- processInstancePage.queryProcessInstanceById(loginUser, projectCode, processInstanceId);
- assertTrue(queryProcessInstanceByIdResponse.getBody().getSuccess());
- assertTrue(queryProcessInstanceByIdResponse.getBody().getData().toString().contains("test_import"));
+ public void testQueryWorkflowInstanceById() {
+ HttpResponse queryWorkflowInstanceByIdResponse =
+ workflowInstancePage.queryWorkflowInstanceById(loginUser, projectCode, workflowInstanceId);
+ assertTrue(queryWorkflowInstanceByIdResponse.getBody().getSuccess());
+ assertTrue(queryWorkflowInstanceByIdResponse.getBody().getData().toString().contains("test_import"));
}
@Test
@Order(5)
- public void testDeleteProcessInstanceById() {
- HttpResponse deleteProcessInstanceByIdResponse =
- processInstancePage.deleteProcessInstanceById(loginUser, projectCode, processInstanceId);
- assertTrue(deleteProcessInstanceByIdResponse.getBody().getSuccess());
-
- HttpResponse queryProcessInstanceListResponse =
- processInstancePage.queryProcessInstanceList(loginUser, projectCode, 1, 10);
- assertTrue(queryProcessInstanceListResponse.getBody().getSuccess());
- Assertions.assertFalse(queryProcessInstanceListResponse.getBody().getData().toString().contains("test_import"));
+ public void testDeleteWorkflowInstanceById() {
+ HttpResponse deleteWorkflowInstanceByIdResponse =
+ workflowInstancePage.deleteWorkflowInstanceById(loginUser, projectCode, workflowInstanceId);
+ assertTrue(deleteWorkflowInstanceByIdResponse.getBody().getSuccess());
+
+ HttpResponse queryWorkflowInstanceListResponse =
+ workflowInstancePage.queryWorkflowInstanceList(loginUser, projectCode, 1, 10);
+ assertTrue(queryWorkflowInstanceListResponse.getBody().getSuccess());
+ Assertions
+ .assertFalse(queryWorkflowInstanceListResponse.getBody().getData().toString().contains("test_import"));
}
}
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java
index 320457ab85..3afcdbef45 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java
@@ -40,15 +40,15 @@ public class ExecutorPage {
private String sessionId;
- public HttpResponse startProcessInstance(User loginUser,
- long projectCode,
- long processDefinitionCode,
- String scheduleTime,
- FailureStrategy failureStrategy,
- WarningType warningType) {
+ public HttpResponse startWorkflowInstance(User loginUser,
+ long projectCode,
+ long workflowDefinitionCode,
+ String scheduleTime,
+ FailureStrategy failureStrategy,
+ WarningType warningType) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
- params.put("processDefinitionCode", processDefinitionCode);
+ params.put("workflowDefinitionCode", workflowDefinitionCode);
params.put("scheduleTime", scheduleTime);
params.put("failureStrategy", failureStrategy);
params.put("warningType", warningType);
@@ -56,14 +56,14 @@ public class ExecutorPage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/executors/start-process-instance", projectCode);
+ String url = String.format("/projects/%s/executors/start-workflow-instance", projectCode);
return requestClient.post(url, headers, params);
}
- public HttpResponse queryExecutingWorkflow(User loginUser, long projectCode, long processInstanceCode) {
+ public HttpResponse queryExecutingWorkflow(User loginUser, long projectCode, long workflowInstanceCode) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
- params.put("id", processInstanceCode);
+ params.put("id", workflowInstanceCode);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
@@ -71,11 +71,11 @@ public class ExecutorPage {
return requestClient.get(url, headers, params);
}
- public HttpResponse execute(User loginUser, long projectCode, int processInstanceId, ExecuteType executeType) {
+ public HttpResponse execute(User loginUser, long projectCode, int workflowInstanceId, ExecuteType executeType) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("projectCode", projectCode);
- params.put("processInstanceId", processInstanceId);
+ params.put("workflowInstanceId", workflowInstanceId);
params.put("executeType", executeType);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
@@ -85,11 +85,11 @@ public class ExecutorPage {
return requestClient.post(url, headers, params);
}
- public HttpResponse executeTask(User loginUser, long projectCode, int processInstanceId, String startNodeList,
+ public HttpResponse executeTask(User loginUser, long projectCode, int workflowInstanceId, String startNodeList,
TaskDependType taskDependType) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
- params.put("processInstanceId", processInstanceId);
+ params.put("workflowInstanceId", workflowInstanceId);
params.put("startNodeList", startNodeList);
params.put("taskDependType", taskDependType);
Map headers = new HashMap<>();
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java
index d6b3b9a743..6fbc5aac07 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java
@@ -36,11 +36,11 @@ public class SchedulerPage {
private String sessionId;
- public HttpResponse createSchedule(User loginUser, long projectCode, long processDefinitionCode, String schedule) {
+ public HttpResponse createSchedule(User loginUser, long projectCode, long workflowDefinitionCode, String schedule) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("projectCode", projectCode);
- params.put("processDefinitionCode", processDefinitionCode);
+ params.put("workflowDefinitionCode", workflowDefinitionCode);
params.put("schedule", schedule);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessDefinitionPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowDefinitionPage.java
similarity index 71%
rename from dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessDefinitionPage.java
rename to dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowDefinitionPage.java
index 3f3b715c39..ea6664b377 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessDefinitionPage.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowDefinitionPage.java
@@ -36,54 +36,54 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j
@AllArgsConstructor
-public class ProcessDefinitionPage {
+public class WorkflowDefinitionPage {
private String sessionId;
- public CloseableHttpResponse importProcessDefinition(User loginUser, long projectCode, File file) {
+ public CloseableHttpResponse importWorkflowDefinition(User loginUser, long projectCode, File file) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-definition/import", projectCode);
+ String url = String.format("/projects/%s/workflow-definition/import", projectCode);
return requestClient.postWithFile(url, headers, params, file);
}
- public HttpResponse queryAllProcessDefinitionByProjectCode(User loginUser, long projectCode) {
+ public HttpResponse queryAllWorkflowDefinitionByProjectCode(User loginUser, long projectCode) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-definition/all", projectCode);
+ String url = String.format("/projects/%s/workflow-definition/all", projectCode);
return requestClient.get(url, headers, params);
}
- public HttpResponse queryProcessDefinitionByCode(User loginUser, long projectCode, long processDefinitionCode) {
+ public HttpResponse queryWorkflowDefinitionByCode(User loginUser, long projectCode, long workflowDefinitionCode) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-definition/%s", projectCode, processDefinitionCode);
+ String url = String.format("/projects/%s/workflow-definition/%s", projectCode, workflowDefinitionCode);
return requestClient.get(url, headers, params);
}
- public HttpResponse getProcessListByProjectCode(User loginUser, long projectCode) {
+ public HttpResponse getWorkflowListByProjectCode(User loginUser, long projectCode) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-definition/query-process-definition-list", projectCode);
+ String url = String.format("/projects/%s/workflow-definition/query-workflow-definition-list", projectCode);
return requestClient.get(url, headers, params);
}
- public HttpResponse queryProcessDefinitionByName(User loginUser, long projectCode, String name) {
+ public HttpResponse queryWorkflowDefinitionByName(User loginUser, long projectCode, String name) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("name", name);
@@ -91,23 +91,23 @@ public class ProcessDefinitionPage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-definition/query-by-name", projectCode);
+ String url = String.format("/projects/%s/workflow-definition/query-by-name", projectCode);
return requestClient.get(url, headers, params);
}
- public HttpResponse queryProcessDefinitionList(User loginUser, long projectCode) {
+ public HttpResponse queryWorkflowDefinitionList(User loginUser, long projectCode) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-definition/list", projectCode);
+ String url = String.format("/projects/%s/workflow-definition/list", projectCode);
return requestClient.get(url, headers, params);
}
- public HttpResponse releaseProcessDefinition(User loginUser, long projectCode, long code,
- ReleaseState releaseState) {
+ public HttpResponse releaseWorkflowDefinition(User loginUser, long projectCode, long code,
+ ReleaseState releaseState) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("code", code);
@@ -116,11 +116,11 @@ public class ProcessDefinitionPage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-definition/%s/release", projectCode, code);
+ String url = String.format("/projects/%s/workflow-definition/%s/release", projectCode, code);
return requestClient.post(url, headers, params);
}
- public HttpResponse deleteProcessDefinitionByCode(User loginUser, long projectCode, long code) {
+ public HttpResponse deleteWorkflowDefinitionByCode(User loginUser, long projectCode, long code) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("code", code);
@@ -128,7 +128,7 @@ public class ProcessDefinitionPage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-definition/%s", projectCode, code);
+ String url = String.format("/projects/%s/workflow-definition/%s", projectCode, code);
return requestClient.delete(url, headers, params);
}
}
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessInstancePage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowInstancePage.java
similarity index 73%
rename from dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessInstancePage.java
rename to dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowInstancePage.java
index eba4e63036..433aa6080b 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessInstancePage.java
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowInstancePage.java
@@ -32,11 +32,11 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j
@AllArgsConstructor
-public class ProcessInstancePage {
+public class WorkflowInstancePage {
private String sessionId;
- public HttpResponse queryProcessInstancesByTriggerCode(User loginUser, long projectCode, long triggerCode) {
+ public HttpResponse queryWorkflowInstancesByTriggerCode(User loginUser, long projectCode, long triggerCode) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("triggerCode", triggerCode);
@@ -44,11 +44,11 @@ public class ProcessInstancePage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-instances/trigger", projectCode);
+ String url = String.format("/projects/%s/workflow-instances/trigger", projectCode);
return requestClient.get(url, headers, params);
}
- public HttpResponse queryProcessInstanceList(User loginUser, long projectCode, int pageNo, int pageSize) {
+ public HttpResponse queryWorkflowInstanceList(User loginUser, long projectCode, int pageNo, int pageSize) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("pageNo", pageNo);
@@ -57,39 +57,39 @@ public class ProcessInstancePage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-instances", projectCode);
+ String url = String.format("/projects/%s/workflow-instances", projectCode);
return requestClient.get(url, headers, params);
}
- public HttpResponse queryTaskListByProcessId(User loginUser, long projectCode, long processInstanceId) {
+ public HttpResponse queryTaskListByWorkflowInstanceId(User loginUser, long projectCode, long workflowInstanceId) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-instances/%s/tasks", projectCode, processInstanceId);
+ String url = String.format("/projects/%s/workflow-instances/%s/tasks", projectCode, workflowInstanceId);
return requestClient.get(url, headers, params);
}
- public HttpResponse queryProcessInstanceById(User loginUser, long projectCode, long processInstanceId) {
+ public HttpResponse queryWorkflowInstanceById(User loginUser, long projectCode, long workflowInstanceId) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-instances/%s", projectCode, processInstanceId);
+ String url = String.format("/projects/%s/workflow-instances/%s", projectCode, workflowInstanceId);
return requestClient.get(url, headers, params);
}
- public HttpResponse deleteProcessInstanceById(User loginUser, long projectCode, long processInstanceId) {
+ public HttpResponse deleteWorkflowInstanceById(User loginUser, long projectCode, long workflowInstanceId) {
Map params = new HashMap<>();
params.put("loginUser", loginUser);
Map headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
- String url = String.format("/projects/%s/process-instances/%s", projectCode, processInstanceId);
+ String url = String.format("/projects/%s/workflow-instances/%s", projectCode, workflowInstanceId);
return requestClient.delete(url, headers, params);
}
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/workflow-json/test.json b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/workflow-json/test.json
index ae11e05f38..9601a93a9b 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/workflow-json/test.json
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/workflow-json/test.json
@@ -1,5 +1,5 @@
[ {
- "processDefinition" : {
+ "workflowDefinition" : {
"id" : 1,
"code" : 9752686452032,
"name" : "test",
@@ -23,12 +23,12 @@
"warningGroupId" : null,
"executionType" : "PARALLEL"
},
- "processTaskRelationList" : [ {
+ "workflowTaskRelationList" : [ {
"id" : 1,
"name" : "",
- "processDefinitionVersion" : 1,
+ "workflowDefinitionVersion" : 1,
"projectCode" : 9752680865600,
- "processDefinitionCode" : 9752686452032,
+ "workflowDefinitionCode" : 9752686452032,
"preTaskCode" : 0,
"preTaskVersion" : 0,
"postTaskCode" : 9752684723008,
@@ -78,4 +78,4 @@
"operateTime" : "2023-06-01 20:41:02"
} ],
"schedule" : null
-} ]
\ No newline at end of file
+} ]
diff --git a/dolphinscheduler-api-test/pom.xml b/dolphinscheduler-api-test/pom.xml
index 94c7603680..095cefd64c 100644
--- a/dolphinscheduler-api-test/pom.xml
+++ b/dolphinscheduler-api-test/pom.xml
@@ -31,8 +31,8 @@
- 8
- 8
+ 11
+ 11
UTF-8
5.7.2
@@ -46,6 +46,7 @@
31.0.1-jre
2.13.2
3.1.2
+ 2.2.0
@@ -65,6 +66,14 @@
junit-jupiter
+
+ org.junit-pioneer
+ junit-pioneer
+ ${junit-pioneer.version}
+ test
+
+
+
org.testcontainers
testcontainers
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java
index f0f749aea7..b7200adc13 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java
@@ -26,10 +26,9 @@ public final class AuditLogConstants {
public static final String CODE = "code";
public static final String CODES = "codes";
public static final String VERSION = "version";
- public static final String PROCESS_DEFINITION_CODE = "processDefinitionCode";
- public static final String PROCESS_DEFINITION_CODES = "processDefinitionCodes";
- public static final String PROCESS_INSTANCE_IDS = "processInstanceIds";
- public static final String PROCESS_INSTANCE_ID = "processInstanceId";
+ public static final String WORKFLOW_DEFINITION_CODES = "workflowDefinitionCodes";
+ public static final String WORKFLOW_INSTANCE_IDS = "workflowInstanceIds";
+ public static final String WORKFLOW_INSTANCE_ID = "workflowInstanceId";
public static final String WORKFLOW_DEFINITION_CODE = "workflowDefinitionCode";
public static final String TYPE = "type";
public static final String NAME = "name";
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java
index 8c41bf14cc..0a1ace075b 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java
@@ -27,15 +27,14 @@ import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.NAME;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PRIORITY;
-import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_DEFINITION_CODE;
-import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_DEFINITION_CODES;
-import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_INSTANCE_ID;
-import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_INSTANCE_IDS;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.QUEUE_ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.TYPE;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.USER_ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.VERSION;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_DEFINITION_CODE;
+import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_DEFINITION_CODES;
+import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_INSTANCE_ID;
+import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_INSTANCE_IDS;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.ALARM_GROUP;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.ALARM_INSTANCE;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.CLUSTER;
@@ -76,6 +75,7 @@ import static org.apache.dolphinscheduler.common.enums.AuditOperationType.START;
import static org.apache.dolphinscheduler.common.enums.AuditOperationType.SWITCH_VERSION;
import static org.apache.dolphinscheduler.common.enums.AuditOperationType.UPDATE;
+import org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants;
import org.apache.dolphinscheduler.api.audit.operator.AuditOperator;
import org.apache.dolphinscheduler.api.audit.operator.impl.AlertGroupAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.AlertInstanceAuditOperatorImpl;
@@ -83,7 +83,6 @@ import org.apache.dolphinscheduler.api.audit.operator.impl.ClusterAuditOperatorI
import org.apache.dolphinscheduler.api.audit.operator.impl.DatasourceAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.EnvironmentAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.K8SNamespaceAuditOperatorImpl;
-import org.apache.dolphinscheduler.api.audit.operator.impl.ProcessInstanceAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.ProjectAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.ResourceAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.ScheduleAuditOperatorImpl;
@@ -95,6 +94,7 @@ import org.apache.dolphinscheduler.api.audit.operator.impl.TokenAuditOperatorImp
import org.apache.dolphinscheduler.api.audit.operator.impl.UserAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.WorkerGroupAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.WorkflowAuditOperatorImpl;
+import org.apache.dolphinscheduler.api.audit.operator.impl.WorkflowInstanceAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.YarnQueueAuditOperatorImpl;
import org.apache.dolphinscheduler.common.enums.AuditModelType;
import org.apache.dolphinscheduler.common.enums.AuditOperationType;
@@ -120,22 +120,24 @@ public enum AuditType {
WORKFLOW_EXPORT(WORKFLOW, EXPORT, WorkflowAuditOperatorImpl.class, new String[]{CODES}, new String[]{}),
WORKFLOW_DELETE(WORKFLOW, DELETE, WorkflowAuditOperatorImpl.class, new String[]{CODE}, new String[]{}),
WORKFLOW_BATCH_DELETE(WORKFLOW, BATCH_DELETE, WorkflowAuditOperatorImpl.class, new String[]{CODES}, new String[]{}),
- WORKFLOW_START(WORKFLOW, START, WorkflowAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODE},
+ WORKFLOW_START(WORKFLOW, START, WorkflowAuditOperatorImpl.class,
+ new String[]{AuditLogConstants.WORKFLOW_DEFINITION_CODE},
new String[]{}),
- WORKFLOW_BATCH_START(WORKFLOW, BATCH_START, WorkflowAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODES},
+ WORKFLOW_BATCH_START(WORKFLOW, BATCH_START, WorkflowAuditOperatorImpl.class,
+ new String[]{WORKFLOW_DEFINITION_CODES},
new String[]{}),
- WORKFLOW_BATCH_RERUN(WORKFLOW, BATCH_RERUN, ProcessInstanceAuditOperatorImpl.class,
- new String[]{PROCESS_INSTANCE_IDS},
+ WORKFLOW_BATCH_RERUN(WORKFLOW, BATCH_RERUN, WorkflowInstanceAuditOperatorImpl.class,
+ new String[]{WORKFLOW_INSTANCE_IDS},
new String[]{}),
- WORKFLOW_EXECUTE(WORKFLOW, EXECUTE, ProcessInstanceAuditOperatorImpl.class, new String[]{PROCESS_INSTANCE_ID},
+ WORKFLOW_EXECUTE(WORKFLOW, EXECUTE, WorkflowInstanceAuditOperatorImpl.class, new String[]{WORKFLOW_INSTANCE_ID},
new String[]{}),
WORKFLOW_IMPORT(WORKFLOW, IMPORT, WorkflowAuditOperatorImpl.class, new String[]{}, new String[]{CODE}),
- WORKFLOW_INSTANCE_UPDATE(WORKFLOW_INSTANCE, UPDATE, ProcessInstanceAuditOperatorImpl.class, new String[]{ID},
+ WORKFLOW_INSTANCE_UPDATE(WORKFLOW_INSTANCE, UPDATE, WorkflowInstanceAuditOperatorImpl.class, new String[]{ID},
new String[]{}),
- WORKFLOW_INSTANCE_DELETE(WORKFLOW_INSTANCE, DELETE, ProcessInstanceAuditOperatorImpl.class, new String[]{ID},
+ WORKFLOW_INSTANCE_DELETE(WORKFLOW_INSTANCE, DELETE, WorkflowInstanceAuditOperatorImpl.class, new String[]{ID},
new String[]{}),
- WORKFLOW_INSTANCE_BATCH_DELETE(WORKFLOW_INSTANCE, BATCH_DELETE, ProcessInstanceAuditOperatorImpl.class,
- new String[]{PROCESS_INSTANCE_IDS}, new String[]{}),
+ WORKFLOW_INSTANCE_BATCH_DELETE(WORKFLOW_INSTANCE, BATCH_DELETE, WorkflowInstanceAuditOperatorImpl.class,
+ new String[]{WORKFLOW_INSTANCE_IDS}, new String[]{}),
TASK_CREATE(TASK, CREATE, TaskAuditOperatorImpl.class, new String[]{}, new String[]{CODE}),
TASK_UPDATE(TASK, UPDATE, TaskAuditOperatorImpl.class, new String[]{}, new String[]{CODE}),
@@ -147,7 +149,8 @@ public enum AuditType {
TASK_INSTANCE_FORCE_SUCCESS(TASK_INSTANCE, FORCE_SUCCESS, TaskInstancesAuditOperatorImpl.class, new String[]{ID},
new String[]{}),
- SCHEDULE_CREATE(SCHEDULE, CREATE, ScheduleAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODE},
+ SCHEDULE_CREATE(SCHEDULE, CREATE, ScheduleAuditOperatorImpl.class,
+ new String[]{AuditLogConstants.WORKFLOW_DEFINITION_CODE},
new String[]{ID}),
SCHEDULE_UPDATE(SCHEDULE, UPDATE, ScheduleAuditOperatorImpl.class, new String[]{ID}, new String[]{}),
SCHEDULE_ONLINE(SCHEDULE, ONLINE, ScheduleAuditOperatorImpl.class, new String[]{ID}, new String[]{}),
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java
index be8d774f8a..8365a61c20 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java
@@ -51,7 +51,7 @@ public class ScheduleAuditOperatorImpl extends BaseAuditOperator {
int id = (int) paramsMap.get(paramNameArr[0]);
Schedule schedule = scheduleMapper.selectById(id);
if (schedule != null) {
- paramsMap.put(AuditLogConstants.CODE, schedule.getProcessDefinitionCode());
+ paramsMap.put(AuditLogConstants.CODE, schedule.getWorkflowDefinitionCode());
paramNameArr[0] = AuditLogConstants.CODE;
auditLogList.forEach(auditLog -> auditLog.setDetail(String.valueOf(id)));
}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowAuditOperatorImpl.java
index b0fb86faa6..5369fe24eb 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowAuditOperatorImpl.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowAuditOperatorImpl.java
@@ -49,8 +49,8 @@ public class WorkflowAuditOperatorImpl extends BaseAuditOperator {
protected void setObjectByParam(String[] paramNameArr, Map paramsMap,
List auditLogList) {
if (paramNameArr[0].equals(AuditLogConstants.CODES)
- || paramNameArr[0].equals(AuditLogConstants.PROCESS_DEFINITION_CODES)
- || paramNameArr[0].equals(AuditLogConstants.PROCESS_INSTANCE_IDS)) {
+ || paramNameArr[0].equals(AuditLogConstants.WORKFLOW_DEFINITION_CODES)
+ || paramNameArr[0].equals(AuditLogConstants.WORKFLOW_INSTANCE_IDS)) {
super.setObjectByParamArr(paramNameArr, paramsMap, auditLogList);
} else {
super.setObjectByParam(paramNameArr, paramsMap, auditLogList);
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessInstanceAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowInstanceAuditOperatorImpl.java
similarity index 94%
rename from dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessInstanceAuditOperatorImpl.java
rename to dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowInstanceAuditOperatorImpl.java
index 2b57da007e..fec46ac149 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessInstanceAuditOperatorImpl.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowInstanceAuditOperatorImpl.java
@@ -35,7 +35,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
-public class ProcessInstanceAuditOperatorImpl extends BaseAuditOperator {
+public class WorkflowInstanceAuditOperatorImpl extends BaseAuditOperator {
@Autowired
private WorkflowInstanceMapper workflowInstanceMapper;
@@ -50,7 +50,7 @@ public class ProcessInstanceAuditOperatorImpl extends BaseAuditOperator {
@Override
protected void setObjectByParam(String[] paramNameArr, Map paramsMap,
List auditLogList) {
- if (paramNameArr[0].equals(AuditLogConstants.PROCESS_INSTANCE_IDS)) {
+ if (paramNameArr[0].equals(AuditLogConstants.WORKFLOW_INSTANCE_IDS)) {
super.setObjectByParamArr(paramNameArr, paramsMap, auditLogList);
} else {
super.setObjectByParam(paramNameArr, paramsMap, auditLogList);
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java
index 98a871bf9e..ee263b865f 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java
@@ -86,13 +86,13 @@ public class DataAnalysisController extends BaseController {
dataAnalysisService.getTaskInstanceStateCountByProject(loginUser, projectCode, startDate, endDate));
}
- @Operation(summary = "countProcessInstanceState", description = "COUNT_WORKFLOW_INSTANCE_NOTES")
+ @Operation(summary = "countWorkflowInstanceState", description = "COUNT_WORKFLOW_INSTANCE_NOTES")
@Parameters({
@Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)),
@Parameter(name = "endDate", description = "END_DATE", schema = @Schema(implementation = String.class)),
@Parameter(name = "projectCode", description = "PROJECT_CODE", schema = @Schema(implementation = long.class, example = "100"))
})
- @GetMapping(value = "/process-state-count")
+ @GetMapping(value = "/workflow-state-count")
@ResponseStatus(HttpStatus.OK)
@ApiException(COUNT_WORKFLOW_INSTANCE_STATE_ERROR)
public Result getWorkflowInstanceStateCount(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java
index 5866e2e25d..ad7254f004 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java
@@ -85,10 +85,10 @@ public class ExecutorController extends BaseController {
private ExecutorService execService;
/**
- * execute process instance
+ * execute workflow instance
*
* @param loginUser login user
- * @param processDefinitionCode process definition code
+ * @param workflowDefinitionCode workflow definition code
* @param scheduleTime schedule time when CommandType is COMPLEMENT_DATA there are two ways to transfer parameters 1.date range, for example:{"complementStartDate":"2022-01-01 12:12:12","complementEndDate":"2022-01-6 12:12:12"} 2.manual input, for example:{"complementScheduleDateList":"2022-01-01 00:00:00,2022-01-02 12:12:12,2022-01-03 12:12:12"}
* @param failureStrategy failure strategy
* @param startNodeList start nodes list
@@ -97,16 +97,16 @@ public class ExecutorController extends BaseController {
* @param warningType warning type
* @param warningGroupId warning group id
* @param runMode run mode
- * @param processInstancePriority process instance priority
+ * @param workflowInstancePriority workflow instance priority
* @param workerGroup worker group
* @param expectedParallelismNumber the expected parallelism number when execute complement in parallel mode
* @param testFlag testFlag
* @param executionOrder complement data in some kind of order
- * @return start process result code
+ * @return start workflow result code
*/
- @Operation(summary = "startProcessInstance", description = "RUN_WORKFLOW_INSTANCE_NOTES")
+ @Operation(summary = "startWorkflowInstance", description = "RUN_WORKFLOW_INSTANCE_NOTES")
@Parameters({
- @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = Long.class), example = "100"),
+ @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = Long.class), example = "100"),
@Parameter(name = "scheduleTime", description = "SCHEDULE_TIME", required = true, schema = @Schema(implementation = String.class), example = "2022-04-06 00:00:00,2022-04-06 00:00:00"),
@Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", required = true, schema = @Schema(implementation = FailureStrategy.class)),
@Parameter(name = "startNodeList", description = "START_NODE_LIST", schema = @Schema(implementation = String.class)),
@@ -115,7 +115,7 @@ public class ExecutorController extends BaseController {
@Parameter(name = "warningType", description = "WARNING_TYPE", required = true, schema = @Schema(implementation = WarningType.class)),
@Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "runMode", description = "RUN_MODE", schema = @Schema(implementation = RunMode.class)),
- @Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)),
+ @Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)),
@Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = Long.class, example = "-1")),
@@ -127,12 +127,12 @@ public class ExecutorController extends BaseController {
@Parameter(name = "allLevelDependent", description = "ALL_LEVEL_DEPENDENT", schema = @Schema(implementation = boolean.class, example = "false")),
@Parameter(name = "executionOrder", description = "EXECUTION_ORDER", schema = @Schema(implementation = ExecutionOrder.class))
})
- @PostMapping(value = "start-process-instance")
+ @PostMapping(value = "start-workflow-instance")
@ResponseStatus(HttpStatus.OK)
@ApiException(START_WORKFLOW_INSTANCE_ERROR)
@OperatorLog(auditType = AuditType.WORKFLOW_START)
public Result> triggerWorkflowDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
- @RequestParam(value = "processDefinitionCode") long processDefinitionCode,
+ @RequestParam(value = "workflowDefinitionCode") long workflowDefinitionCode,
@RequestParam(value = "scheduleTime") String scheduleTime,
@RequestParam(value = "failureStrategy") FailureStrategy failureStrategy,
@RequestParam(value = "startNodeList", required = false) String startNodeList,
@@ -141,7 +141,7 @@ public class ExecutorController extends BaseController {
@RequestParam(value = "warningType") WarningType warningType,
@RequestParam(value = "warningGroupId", required = false) Integer warningGroupId,
@RequestParam(value = "runMode", required = false) RunMode runMode,
- @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority,
+ @RequestParam(value = "workflowInstancePriority", required = false) Priority workflowInstancePriority,
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode,
@@ -156,14 +156,14 @@ public class ExecutorController extends BaseController {
case START_PROCESS:
final WorkflowTriggerRequest workflowTriggerRequest = WorkflowTriggerRequest.builder()
.loginUser(loginUser)
- .workflowDefinitionCode(processDefinitionCode)
+ .workflowDefinitionCode(workflowDefinitionCode)
.startNodes(startNodeList)
.failureStrategy(failureStrategy)
.taskDependType(taskDependType)
.execType(execType)
.warningType(warningType)
.warningGroupId(warningGroupId)
- .workflowInstancePriority(processInstancePriority)
+ .workflowInstancePriority(workflowInstancePriority)
.workerGroup(workerGroup)
.tenantCode(tenantCode)
.environmentCode(environmentCode)
@@ -176,7 +176,7 @@ public class ExecutorController extends BaseController {
case COMPLEMENT_DATA:
final WorkflowBackFillRequest workflowBackFillRequest = WorkflowBackFillRequest.builder()
.loginUser(loginUser)
- .workflowDefinitionCode(processDefinitionCode)
+ .workflowDefinitionCode(workflowDefinitionCode)
.startNodes(startNodeList)
.failureStrategy(failureStrategy)
.taskDependType(taskDependType)
@@ -184,7 +184,7 @@ public class ExecutorController extends BaseController {
.warningType(warningType)
.warningGroupId(warningGroupId)
.backfillRunMode(runMode)
- .workflowInstancePriority(processInstancePriority)
+ .workflowInstancePriority(workflowInstancePriority)
.workerGroup(workerGroup)
.tenantCode(tenantCode)
.environmentCode(environmentCode)
@@ -204,12 +204,12 @@ public class ExecutorController extends BaseController {
}
/**
- * batch execute process instance
- * If any processDefinitionCode cannot be found, the failure information is returned and the status is set to
+ * batch execute workflow instance
+ * If any workflowDefinitionCode cannot be found, the failure information is returned and the status is set to
* failed. The successful task will run normally and will not stop
*
* @param loginUser login user
- * @param processDefinitionCodes process definition codes
+ * @param workflowDefinitionCodes workflow definition codes
* @param scheduleTime schedule time
* @param failureStrategy failure strategy
* @param startNodeList start nodes list
@@ -218,17 +218,17 @@ public class ExecutorController extends BaseController {
* @param warningType warning type
* @param warningGroupId warning group id
* @param runMode run mode
- * @param processInstancePriority process instance priority
+ * @param workflowInstancePriority workflow instance priority
* @param workerGroup worker group
* @param tenantCode tenant code
* @param expectedParallelismNumber the expected parallelism number when execute complement in parallel mode
* @param testFlag testFlag
* @param executionOrder complement data in some kind of order
- * @return start process result code
+ * @return start workflow result code
*/
- @Operation(summary = "batchStartProcessInstance", description = "BATCH_RUN_WORKFLOW_INSTANCE_NOTES")
+ @Operation(summary = "batchStartWorkflowInstance", description = "BATCH_RUN_WORKFLOW_INSTANCE_NOTES")
@Parameters({
- @Parameter(name = "processDefinitionCodes", description = "WORKFLOW_DEFINITION_CODE_LIST", required = true, schema = @Schema(implementation = String.class, example = "1,2,3")),
+ @Parameter(name = "workflowDefinitionCodes", description = "WORKFLOW_DEFINITION_CODE_LIST", required = true, schema = @Schema(implementation = String.class, example = "1,2,3")),
@Parameter(name = "scheduleTime", description = "SCHEDULE_TIME", required = true, schema = @Schema(implementation = String.class, example = "2022-04-06 00:00:00,2022-04-06 00:00:00")),
@Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", required = true, schema = @Schema(implementation = FailureStrategy.class)),
@Parameter(name = "startNodeList", description = "START_NODE_LIST", schema = @Schema(implementation = String.class)),
@@ -237,7 +237,7 @@ public class ExecutorController extends BaseController {
@Parameter(name = "warningType", description = "WARNING_TYPE", required = true, schema = @Schema(implementation = WarningType.class)),
@Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "runMode", description = "RUN_MODE", schema = @Schema(implementation = RunMode.class)),
- @Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)),
+ @Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)),
@Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = Long.class, example = "-1")),
@@ -248,12 +248,12 @@ public class ExecutorController extends BaseController {
@Parameter(name = "allLevelDependent", description = "ALL_LEVEL_DEPENDENT", schema = @Schema(implementation = boolean.class, example = "false")),
@Parameter(name = "executionOrder", description = "EXECUTION_ORDER", schema = @Schema(implementation = ExecutionOrder.class))
})
- @PostMapping(value = "batch-start-process-instance")
+ @PostMapping(value = "batch-start-workflow-instance")
@ResponseStatus(HttpStatus.OK)
@ApiException(BATCH_START_WORKFLOW_INSTANCE_ERROR)
@OperatorLog(auditType = AuditType.WORKFLOW_BATCH_START)
public Result> batchTriggerWorkflowDefinitions(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
- @RequestParam(value = "processDefinitionCodes") String processDefinitionCodes,
+ @RequestParam(value = "workflowDefinitionCodes") String workflowDefinitionCodes,
@RequestParam(value = "scheduleTime") String scheduleTime,
@RequestParam(value = "failureStrategy") FailureStrategy failureStrategy,
@RequestParam(value = "startNodeList", required = false) String startNodeList,
@@ -262,7 +262,7 @@ public class ExecutorController extends BaseController {
@RequestParam(value = "warningType") WarningType warningType,
@RequestParam(value = "warningGroupId", required = false) Integer warningGroupId,
@RequestParam(value = "runMode", required = false) RunMode runMode,
- @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority,
+ @RequestParam(value = "workflowInstancePriority", required = false) Priority workflowInstancePriority,
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode,
@@ -273,12 +273,12 @@ public class ExecutorController extends BaseController {
@RequestParam(value = "complementDependentMode", required = false) ComplementDependentMode complementDependentMode,
@RequestParam(value = "allLevelDependent", required = false, defaultValue = "false") boolean allLevelDependent,
@RequestParam(value = "executionOrder", required = false) ExecutionOrder executionOrder) {
- List workflowDefinitionCodes = Arrays.stream(processDefinitionCodes.split(Constants.COMMA))
+ List workflowDefinitionCodeList = Arrays.stream(workflowDefinitionCodes.split(Constants.COMMA))
.map(Long::parseLong)
.collect(Collectors.toList());
List result = new ArrayList<>();
- for (Long workflowDefinitionCode : workflowDefinitionCodes) {
+ for (Long workflowDefinitionCode : workflowDefinitionCodeList) {
Result> workflowInstanceIds = triggerWorkflowDefinition(loginUser,
workflowDefinitionCode,
scheduleTime,
@@ -289,7 +289,7 @@ public class ExecutorController extends BaseController {
warningType,
warningGroupId,
runMode,
- processInstancePriority,
+ workflowInstancePriority,
workerGroup,
tenantCode,
environmentCode,
@@ -306,11 +306,11 @@ public class ExecutorController extends BaseController {
}
/**
- * do action to process instance: pause, stop, repeat, recover from pause, recover from stop
+ * do action to workflow instance: pause, stop, repeat, recover from pause, recover from stop
*/
@Operation(summary = "execute", description = "EXECUTE_ACTION_TO_WORKFLOW_INSTANCE_NOTES")
@Parameters({
- @Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
+ @Parameter(name = "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "executeType", description = "EXECUTE_TYPE", required = true, schema = @Schema(implementation = ExecuteType.class))
})
@PostMapping(value = "/execute")
@@ -318,24 +318,24 @@ public class ExecutorController extends BaseController {
@ApiException(EXECUTE_WORKFLOW_INSTANCE_ERROR)
@OperatorLog(auditType = AuditType.WORKFLOW_EXECUTE)
public Result controlWorkflowInstance(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
- @RequestParam("processInstanceId") Integer processInstanceId,
+ @RequestParam("workflowInstanceId") Integer workflowInstanceId,
@RequestParam("executeType") ExecuteType executeType) {
- execService.controlWorkflowInstance(loginUser, processInstanceId, executeType);
+ execService.controlWorkflowInstance(loginUser, workflowInstanceId, executeType);
return Result.success();
}
/**
- * batch execute and do action to process instance
+ * batch execute and do action to workflow instance
*
* @param loginUser login user
- * @param processInstanceIds process instance ids, delimiter by "," if more than one id
+ * @param workflowInstanceIds workflow instance ids, delimiter by "," if more than one id
* @param executeType execute type
* @return execute result code
*/
@Operation(summary = "batchExecute", description = "BATCH_EXECUTE_ACTION_TO_WORKFLOW_INSTANCE_NOTES")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = int.class)),
- @Parameter(name = "processInstanceIds", description = "PROCESS_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)),
+ @Parameter(name = "workflowInstanceIds", description = "WORKFLOW_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)),
@Parameter(name = "executeType", description = "EXECUTE_TYPE", required = true, schema = @Schema(implementation = ExecuteType.class))
})
@PostMapping(value = "/batch-execute")
@@ -343,20 +343,20 @@ public class ExecutorController extends BaseController {
@ApiException(BATCH_EXECUTE_WORKFLOW_INSTANCE_ERROR)
@OperatorLog(auditType = AuditType.WORKFLOW_BATCH_RERUN)
public Result batchControlWorkflowInstance(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
- @RequestParam("processInstanceIds") String processInstanceIds,
+ @RequestParam("workflowInstanceIds") String workflowInstanceIds,
@RequestParam("executeType") ExecuteType executeType) {
- String[] processInstanceIdArray = processInstanceIds.split(Constants.COMMA);
+ String[] workflowInstanceIdArray = workflowInstanceIds.split(Constants.COMMA);
List errorMessage = new ArrayList<>();
- for (String strProcessInstanceId : processInstanceIdArray) {
- int processInstanceId = Integer.parseInt(strProcessInstanceId);
+ for (String strWorkflowInstanceId : workflowInstanceIdArray) {
+ int workflowInstanceId = Integer.parseInt(strWorkflowInstanceId);
try {
- execService.controlWorkflowInstance(loginUser, processInstanceId, executeType);
- log.info("Success do action {} on workflowInstance: {}", executeType, processInstanceId);
+ execService.controlWorkflowInstance(loginUser, workflowInstanceId, executeType);
+ log.info("Success do action {} on workflowInstance: {}", executeType, workflowInstanceId);
} catch (Exception e) {
- errorMessage.add("Failed do action " + executeType + " on workflowInstance: " + processInstanceId
+ errorMessage.add("Failed do action " + executeType + " on workflowInstance: " + workflowInstanceId
+ "reason: " + e.getMessage());
- log.error("Failed do action {} on workflowInstance: {}, error: {}", executeType, processInstanceId, e);
+ log.error("Failed do action {} on workflowInstance: {}, error: {}", executeType, workflowInstanceId, e);
}
}
if (org.apache.commons.collections4.CollectionUtils.isNotEmpty(errorMessage)) {
@@ -417,18 +417,18 @@ public class ExecutorController extends BaseController {
}
/**
- * do action to process instance: pause, stop, repeat, recover from pause, recover from stop
+ * do action to workflow instance: pause, stop, repeat, recover from pause, recover from stop
*
* @param loginUser login user
* @param projectCode project code
- * @param processInstanceId process instance id
+ * @param workflowInstanceId workflow instance id
* @param startNodeList start node list
* @param taskDependType task depend type
* @return execute result code
*/
@Operation(summary = "execute-task", description = "EXECUTE_ACTION_TO_WORKFLOW_INSTANCE_NOTES")
@Parameters({
- @Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
+ @Parameter(name = "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "startNodeList", description = "START_NODE_LIST", required = true, schema = @Schema(implementation = String.class)),
@Parameter(name = "taskDependType", description = "TASK_DEPEND_TYPE", required = true, schema = @Schema(implementation = TaskDependType.class))
})
@@ -438,13 +438,13 @@ public class ExecutorController extends BaseController {
@OperatorLog(auditType = AuditType.WORKFLOW_EXECUTE)
public Result executeTask(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
- @RequestParam("processInstanceId") Integer processInstanceId,
+ @RequestParam("workflowInstanceId") Integer workflowInstanceId,
@RequestParam("startNodeList") String startNodeList,
@RequestParam("taskDependType") TaskDependType taskDependType) {
- log.info("Start to execute task in process instance, projectCode:{}, processInstanceId:{}.",
+ log.info("Start to execute task in workflow instance, projectCode:{}, workflowInstanceId:{}.",
projectCode,
- processInstanceId);
- return execService.executeTask(loginUser, projectCode, processInstanceId, startNodeList, taskDependType);
+ workflowInstanceId);
+ return execService.executeTask(loginUser, projectCode, workflowInstanceId, startNodeList, taskDependType);
}
}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java
index f95f369d07..30c489d216 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java
@@ -71,7 +71,7 @@ public class SchedulerController extends BaseController {
public static final String DEFAULT_WARNING_TYPE = "NONE";
public static final String DEFAULT_NOTIFY_GROUP_ID = "1";
public static final String DEFAULT_FAILURE_POLICY = "CONTINUE";
- public static final String DEFAULT_PROCESS_INSTANCE_PRIORITY = "MEDIUM";
+ public static final String DEFAULT_WORKFLOW_INSTANCE_PRIORITY = "MEDIUM";
@Autowired
private SchedulerService schedulerService;
@@ -81,19 +81,19 @@ public class SchedulerController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
- * @param processDefinitionCode process definition code
+ * @param workflowDefinitionCode workflow definition code
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
- * @param processInstancePriority process instance priority
+ * @param workflowInstancePriority workflow instance priority
* @param workerGroup worker group
* @param tenantCode tenant code
* @return create result code
*/
@Operation(summary = "createSchedule", description = "CREATE_SCHEDULE_NOTES")
@Parameters({
- @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")),
+ @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")),
@Parameter(name = "schedule", description = "SCHEDULE", schema = @Schema(implementation = String.class, example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','timezoneId':'America/Phoenix','crontab':'0 0 3/6 * * ? *'}")),
@Parameter(name = "warningType", description = "WARNING_TYPE", schema = @Schema(implementation = WarningType.class)),
@Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", schema = @Schema(implementation = int.class, example = "100")),
@@ -101,7 +101,7 @@ public class SchedulerController extends BaseController {
@Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = long.class)),
- @Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
+ @Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
})
@PostMapping()
@ResponseStatus(HttpStatus.CREATED)
@@ -109,7 +109,7 @@ public class SchedulerController extends BaseController {
@OperatorLog(auditType = AuditType.SCHEDULE_CREATE)
public Result createSchedule(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
- @RequestParam(value = "processDefinitionCode") long processDefinitionCode,
+ @RequestParam(value = "workflowDefinitionCode") long workflowDefinitionCode,
@RequestParam(value = "schedule") String schedule,
@RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType,
@RequestParam(value = "warningGroupId", required = false, defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId,
@@ -117,16 +117,16 @@ public class SchedulerController extends BaseController {
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode,
- @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) {
+ @RequestParam(value = "workflowInstancePriority", required = false, defaultValue = DEFAULT_WORKFLOW_INSTANCE_PRIORITY) Priority workflowInstancePriority) {
Map<String, Object> result = schedulerService.insertSchedule(
loginUser,
projectCode,
- processDefinitionCode,
+ workflowDefinitionCode,
schedule,
warningType,
warningGroupId,
failureStrategy,
- processInstancePriority,
+ workflowInstancePriority,
workerGroup,
tenantCode,
environmentCode);
@@ -146,7 +146,7 @@ public class SchedulerController extends BaseController {
* @param failureStrategy failure strategy
* @param workerGroup worker group
* @param tenantCode tenant code
- * @param processInstancePriority process instance priority
+ * @param workflowInstancePriority workflow instance priority
* @return update result code
*/
@Operation(summary = "updateSchedule", description = "UPDATE_SCHEDULE_NOTES")
@@ -158,7 +158,7 @@ public class SchedulerController extends BaseController {
@Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", schema = @Schema(implementation = FailureStrategy.class)),
@Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")),
- @Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
+ @Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
@Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = long.class)),
})
@PutMapping("/{id}")
@@ -175,10 +175,10 @@ public class SchedulerController extends BaseController {
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode,
- @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) {
+ @RequestParam(value = "workflowInstancePriority", required = false, defaultValue = DEFAULT_WORKFLOW_INSTANCE_PRIORITY) Priority workflowInstancePriority) {
Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectCode, id, schedule,
- warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, tenantCode,
+ warningType, warningGroupId, failureStrategy, workflowInstancePriority, workerGroup, tenantCode,
environmentCode);
return returnDataList(result);
}
@@ -216,7 +216,7 @@ public class SchedulerController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
- * @param processDefinitionCode process definition code
+ * @param workflowDefinitionCode workflow definition code
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
@@ -224,7 +224,7 @@ public class SchedulerController extends BaseController {
*/
@Operation(summary = "queryScheduleListPaging", description = "QUERY_SCHEDULE_LIST_PAGING_NOTES")
@Parameters({
- @Parameter(name = "processDefinitionId", description = "WORKFLOW_DEFINITION_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
+ @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = false, schema = @Schema(implementation = long.class, example = "100")),
@Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
@Parameter(name = "pageNo", description = "PAGE_NO", schema = @Schema(implementation = int.class, example = "1")),
@Parameter(name = "pageSize", description = "PAGE_SIZE", schema = @Schema(implementation = int.class, example = "20"))
@@ -233,13 +233,13 @@ public class SchedulerController extends BaseController {
@ApiException(QUERY_SCHEDULE_LIST_PAGING_ERROR)
public Result queryScheduleListPaging(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
- @RequestParam(value = "processDefinitionCode", required = false, defaultValue = "0") long processDefinitionCode,
+ @RequestParam(value = "workflowDefinitionCode", required = false, defaultValue = "0") long workflowDefinitionCode,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize) {
checkPageParams(pageNo, pageSize);
searchVal = ParameterUtils.handleEscapes(searchVal);
- return schedulerService.querySchedule(loginUser, projectCode, processDefinitionCode, searchVal, pageNo,
+ return schedulerService.querySchedule(loginUser, projectCode, workflowDefinitionCode, searchVal, pageNo,
pageSize);
}
@@ -304,49 +304,49 @@ public class SchedulerController extends BaseController {
}
/**
- * update process definition schedule
+ * update workflow definition schedule
*
* @param loginUser login user
* @param projectCode project code
- * @param processDefinitionCode process definition code
+ * @param workflowDefinitionCode workflow definition code
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param workerGroup worker group
- * @param processInstancePriority process instance priority
+ * @param workflowInstancePriority workflow instance priority
* @return update result code
*/
- @Operation(summary = "updateScheduleByWorkflowDefinitionCode", description = "UPDATE_SCHEDULE_BY_PROCESS_DEFINITION_CODE_NOTES")
+ @Operation(summary = "updateScheduleByWorkflowDefinitionCode", description = "UPDATE_SCHEDULE_BY_WORKFLOW_DEFINITION_CODE_NOTES")
@Parameters({
- @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "12345678")),
+ @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "12345678")),
@Parameter(name = "schedule", description = "SCHEDULE", schema = @Schema(implementation = String.class, example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}")),
@Parameter(name = "warningType", description = "WARNING_TYPE", schema = @Schema(implementation = WarningType.class)),
@Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", schema = @Schema(implementation = FailureStrategy.class)),
@Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")),
- @Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
+ @Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
@Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = long.class)),
})
@PutMapping("/update/{code}")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_SCHEDULE_ERROR)
@OperatorLog(auditType = AuditType.SCHEDULE_UPDATE)
- public Result updateScheduleByProcessDefinitionCode(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
- @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
- @PathVariable(value = "code") long processDefinitionCode,
- @RequestParam(value = "schedule") String schedule,
- @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType,
- @RequestParam(value = "warningGroupId", required = false) int warningGroupId,
- @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy,
- @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
- @RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
- @RequestParam(value = "environmentCode", required = false, defaultValue = "-1") long environmentCode,
- @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) {
+ public Result updateScheduleByWorkflowDefinitionCode(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
+ @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
+ @PathVariable(value = "code") long workflowDefinitionCode,
+ @RequestParam(value = "schedule") String schedule,
+ @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType,
+ @RequestParam(value = "warningGroupId", required = false) int warningGroupId,
+ @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy,
+ @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
+ @RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
+ @RequestParam(value = "environmentCode", required = false, defaultValue = "-1") long environmentCode,
+ @RequestParam(value = "workflowInstancePriority", required = false) Priority workflowInstancePriority) {
Map<String, Object> result = schedulerService.updateScheduleByWorkflowDefinitionCode(loginUser, projectCode,
- processDefinitionCode, schedule,
- warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, tenantCode,
+ workflowDefinitionCode, schedule,
+ warningType, warningGroupId, failureStrategy, workflowInstancePriority, workerGroup, tenantCode,
environmentCode);
return returnDataList(result);
}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java
index 2f99f098ed..3fb5ec4e01 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java
@@ -299,7 +299,7 @@ public class TaskGroupController extends BaseController {
*
* @param groupId ID for task group
* @param taskName Task Name
- * @param processName Process instance name
+ * @param workflowInstanceName workflow instance name
* @param status Task queue status
* @param loginUser login user
* @param pageNo page number
@@ -310,7 +310,7 @@ public class TaskGroupController extends BaseController {
@Parameters({
@Parameter(name = "groupId", description = "GROUP_ID", required = false, schema = @Schema(implementation = int.class, example = "1", defaultValue = "-1")),
@Parameter(name = "taskInstanceName", description = "TASK_INSTANCE_NAME", required = false, schema = @Schema(implementation = String.class, example = "taskName")),
- @Parameter(name = "processInstanceName", description = "PROCESS_INSTANCE_NAME", required = false, schema = @Schema(implementation = String.class, example = "processName")),
+ @Parameter(name = "workflowInstanceName", description = "WORKFLOW_INSTANCE_NAME", required = false, schema = @Schema(implementation = String.class, example = "workflowInstanceName")),
@Parameter(name = "status", description = "TASK_GROUP_STATUS", required = false, schema = @Schema(implementation = int.class, example = "1")),
@Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")),
@Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "20"))
@@ -321,14 +321,14 @@ public class TaskGroupController extends BaseController {
public Result queryTaskGroupQueues(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "groupId", required = false, defaultValue = "-1") Integer groupId,
@RequestParam(value = "taskInstanceName", required = false) String taskName,
- @RequestParam(value = "processInstanceName", required = false) String processName,
+ @RequestParam(value = "workflowInstanceName", required = false) String workflowInstanceName,
@RequestParam(value = "status", required = false) Integer status,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize) {
Map<String, Object> result = taskGroupQueueService.queryTasksByGroupId(
loginUser,
taskName,
- processName,
+ workflowInstanceName,
status,
groupId,
pageNo,
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java
index ef9d3b4b63..777da9bc21 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java
@@ -69,7 +69,7 @@ public class TaskInstanceController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
- * @param processInstanceId process instance id
+ * @param workflowInstanceId workflow instance id
* @param searchVal search value
* @param taskName task name
* @param stateType state type
@@ -83,8 +83,8 @@ public class TaskInstanceController extends BaseController {
*/
@Operation(summary = "queryTaskListPaging", description = "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES")
@Parameters({
- @Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", schema = @Schema(implementation = int.class, example = "100")),
- @Parameter(name = "processInstanceName", description = "PROCESS_INSTANCE_NAME", schema = @Schema(implementation = String.class)),
+ @Parameter(name = "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", schema = @Schema(implementation = int.class, example = "100")),
+ @Parameter(name = "workflowInstanceName", description = "WORKFLOW_INSTANCE_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
@Parameter(name = "taskName", description = "TASK_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "taskCode", description = "TASK_CODE", schema = @Schema(implementation = Long.class)),
@@ -102,9 +102,9 @@ public class TaskInstanceController extends BaseController {
@ApiException(QUERY_TASK_LIST_PAGING_ERROR)
public Result queryTaskListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
- @RequestParam(value = "processInstanceId", required = false, defaultValue = "0") Integer processInstanceId,
- @RequestParam(value = "processInstanceName", required = false) String processInstanceName,
- @RequestParam(value = "processDefinitionName", required = false) String processDefinitionName,
+ @RequestParam(value = "workflowInstanceId", required = false, defaultValue = "0") Integer workflowInstanceId,
+ @RequestParam(value = "workflowInstanceName", required = false) String workflowInstanceName,
+ @RequestParam(value = "workflowDefinitionName", required = false) String workflowDefinitionName,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam(value = "taskName", required = false) String taskName,
@RequestParam(value = "taskCode", required = false) Long taskCode,
@@ -121,9 +121,9 @@ public class TaskInstanceController extends BaseController {
return taskInstanceService.queryTaskListPaging(
loginUser,
projectCode,
- processInstanceId,
- processInstanceName,
- processDefinitionName,
+ workflowInstanceId,
+ workflowInstanceName,
+ workflowDefinitionName,
taskName,
taskCode,
executorName,
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowDefinitionController.java
index b531f3a16c..5f203b9f15 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowDefinitionController.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowDefinitionController.java
@@ -82,7 +82,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
*/
@Tag(name = "WORKFLOW_DEFINITION_TAG")
@RestController
-@RequestMapping("projects/{projectCode}/process-definition")
+@RequestMapping("projects/{projectCode}/workflow-definition")
@Slf4j
public class WorkflowDefinitionController extends BaseController {
@@ -204,10 +204,10 @@ public class WorkflowDefinitionController extends BaseController {
public Result verifyWorkflowDefinitionName(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "name", required = true) String name,
- @RequestParam(value = "code", required = false, defaultValue = "0") long processDefinitionCode) {
+ @RequestParam(value = "workflowDefinitionCode", required = false, defaultValue = "0") long workflowDefinitionCode) {
Map<String, Object> result =
workflowDefinitionService.verifyWorkflowDefinitionName(loginUser, projectCode, name,
- processDefinitionCode);
+ workflowDefinitionCode);
return returnDataList(result);
}
@@ -581,7 +581,7 @@ public class WorkflowDefinitionController extends BaseController {
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class, example = "100"))
})
- @GetMapping(value = "/query-process-definition-list")
+ @GetMapping(value = "/query-workflow-definition-list")
@ResponseStatus(HttpStatus.OK)
@ApiException(GET_TASKS_LIST_BY_WORKFLOW_DEFINITION_CODE_ERROR)
public Result getWorkflowListByProjectCode(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@@ -600,20 +600,20 @@ public class WorkflowDefinitionController extends BaseController {
@Operation(summary = "getTaskListByWorkflowDefinitionCode", description = "GET_TASK_LIST_BY_WORKFLOW_CODE_NOTES")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")),
- @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")),
+ @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")),
})
@GetMapping(value = "/query-task-definition-list")
@ResponseStatus(HttpStatus.OK)
@ApiException(GET_TASKS_LIST_BY_WORKFLOW_DEFINITION_CODE_ERROR)
public Result getTaskListByWorkflowDefinitionCode(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
- @RequestParam(value = "processDefinitionCode") Long processDefinitionCode) {
+ @RequestParam(value = "workflowDefinitionCode") Long workflowDefinitionCode) {
Map<String, Object> result = workflowDefinitionService
- .queryTaskDefinitionListByWorkflowDefinitionCode(projectCode, processDefinitionCode);
+ .queryTaskDefinitionListByWorkflowDefinitionCode(projectCode, workflowDefinitionCode);
return returnDataList(result);
}
- @Operation(summary = "deleteByCode", description = "DELETE_WORKFLOW_DEFINITION_BY_ID_NOTES")
+ @Operation(summary = "deleteByWorkflowDefinitionCode", description = "DELETE_WORKFLOW_DEFINITION_BY_ID_NOTES")
@Parameters({
@Parameter(name = "code", description = "WORKFLOW_DEFINITION_CODE", schema = @Schema(implementation = int.class, example = "100"))
})
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceController.java
index 6ab26c1909..bd7fb4619b 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceController.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceController.java
@@ -67,7 +67,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
*/
@Tag(name = "WORKFLOW_INSTANCE_TAG")
@RestController
-@RequestMapping("/projects/{projectCode}/process-instances")
+@RequestMapping("/projects/{projectCode}/workflow-instances")
@Slf4j
public class WorkflowInstanceController extends BaseController {
@@ -81,7 +81,7 @@ public class WorkflowInstanceController extends BaseController {
* @param projectCode project code
* @param pageNo page number
* @param pageSize page size
- * @param processDefineCode process definition code
+ * @param workflowDefinitionCode workflow definition code
* @param searchVal search value
* @param stateType state type
* @param host host
@@ -92,7 +92,7 @@ public class WorkflowInstanceController extends BaseController {
*/
@Operation(summary = "queryWorkflowInstanceListPaging", description = "QUERY_WORKFLOW_INSTANCE_LIST_NOTES")
@Parameters({
- @Parameter(name = "processDefineCode", description = "WORKFLOW_DEFINITION_CODE", schema = @Schema(implementation = long.class, example = "100")),
+ @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", schema = @Schema(implementation = long.class, example = "100")),
@Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
@Parameter(name = "executorName", description = "EXECUTOR_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "stateType", description = "EXECUTION_STATUS", schema = @Schema(implementation = WorkflowExecutionStatus.class)),
@@ -107,7 +107,7 @@ public class WorkflowInstanceController extends BaseController {
@ApiException(Status.QUERY_WORKFLOW_INSTANCE_LIST_PAGING_ERROR)
public Result queryWorkflowInstanceList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
- @RequestParam(value = "processDefineCode", required = false, defaultValue = "0") long processDefineCode,
+ @RequestParam(value = "workflowDefinitionCode", required = false, defaultValue = "0") long workflowDefinitionCode,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam(value = "executorName", required = false) String executorName,
@RequestParam(value = "stateType", required = false) WorkflowExecutionStatus stateType,
@@ -120,7 +120,8 @@ public class WorkflowInstanceController extends BaseController {
checkPageParams(pageNo, pageSize);
searchVal = ParameterUtils.handleEscapes(searchVal);
- return workflowInstanceService.queryWorkflowInstanceList(loginUser, projectCode, processDefineCode, startTime,
+ return workflowInstanceService.queryWorkflowInstanceList(loginUser, projectCode, workflowDefinitionCode,
+ startTime,
endTime,
searchVal, executorName, stateType, host, otherParamsJson, pageNo, pageSize);
}
@@ -153,7 +154,7 @@ public class WorkflowInstanceController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
- * @param taskRelationJson process task relation json
+ * @param taskRelationJson workflow task relation json
* @param taskDefinitionJson taskDefinitionJson
* @param id workflow instance id
* @param scheduleTime schedule time
@@ -168,9 +169,9 @@ public class WorkflowInstanceController extends BaseController {
@Parameter(name = "id", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "1")),
@Parameter(name = "scheduleTime", description = "SCHEDULE_TIME", schema = @Schema(implementation = String.class)),
@Parameter(name = "syncDefine", description = "SYNC_DEFINE", required = true, schema = @Schema(implementation = boolean.class, example = "false")),
- @Parameter(name = "globalParams", description = "PROCESS_GLOBAL_PARAMS", schema = @Schema(implementation = String.class, example = "[]")),
+ @Parameter(name = "globalParams", description = "WORKFLOW_GLOBAL_PARAMS", schema = @Schema(implementation = String.class, example = "[]")),
@Parameter(name = "locations", description = "WORKFLOW_INSTANCE_LOCATIONS", schema = @Schema(implementation = String.class)),
- @Parameter(name = "timeout", description = "PROCESS_TIMEOUT", schema = @Schema(implementation = int.class, example = "0")),
+ @Parameter(name = "timeout", description = "WORKFLOW_TIMEOUT", schema = @Schema(implementation = int.class, example = "0")),
})
@PutMapping(value = "/{id}")
@ResponseStatus(HttpStatus.OK)
@@ -223,7 +224,7 @@ public class WorkflowInstanceController extends BaseController {
* @param endTime end time
* @return list of workflow instance
*/
- @Operation(summary = "queryTopNLongestRunningWorkflowInstance", description = "QUERY_TOPN_LONGEST_RUNNING_PROCESS_INSTANCE_NOTES")
+ @Operation(summary = "queryTopNLongestRunningWorkflowInstance", description = "QUERY_TOPN_LONGEST_RUNNING_WORKFLOW_INSTANCE_NOTES")
@Parameters({
@Parameter(name = "size", description = "WORKFLOW_INSTANCE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "10")),
@Parameter(name = "startTime", description = "WORKFLOW_INSTANCE_START_TIME", required = true, schema = @Schema(implementation = String.class)),
@@ -381,13 +382,13 @@ public class WorkflowInstanceController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
- * @param processInstanceIds workflow instance id
+ * @param workflowInstanceIds workflow instance id
* @return delete result code
*/
@Operation(summary = "batchDeleteWorkflowInstanceByIds", description = "BATCH_DELETE_WORKFLOW_INSTANCE_BY_IDS_NOTES")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = int.class)),
- @Parameter(name = "processInstanceIds", description = "PROCESS_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)),
+ @Parameter(name = "workflowInstanceIds", description = "WORKFLOW_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)),
})
@PostMapping(value = "/batch-delete")
@ResponseStatus(HttpStatus.OK)
@@ -395,21 +396,21 @@ public class WorkflowInstanceController extends BaseController {
@OperatorLog(auditType = AuditType.WORKFLOW_INSTANCE_BATCH_DELETE)
public Result batchDeleteWorkflowInstanceByIds(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable long projectCode,
- @RequestParam("processInstanceIds") String processInstanceIds) {
+ @RequestParam("workflowInstanceIds") String workflowInstanceIds) {
// task queue
Map<String, Object> result = new HashMap<>();
List<String> deleteFailedIdList = new ArrayList<>();
- if (!StringUtils.isEmpty(processInstanceIds)) {
- String[] processInstanceIdArray = processInstanceIds.split(Constants.COMMA);
+ if (!StringUtils.isEmpty(workflowInstanceIds)) {
+ String[] workflowInstanceIdArray = workflowInstanceIds.split(Constants.COMMA);
- for (String strProcessInstanceId : processInstanceIdArray) {
- int processInstanceId = Integer.parseInt(strProcessInstanceId);
+ for (String strWorkflowInstanceId : workflowInstanceIdArray) {
+ int workflowInstanceId = Integer.parseInt(strWorkflowInstanceId);
try {
- workflowInstanceService.deleteWorkflowInstanceById(loginUser, processInstanceId);
+ workflowInstanceService.deleteWorkflowInstanceById(loginUser, workflowInstanceId);
} catch (Exception e) {
- log.error("Delete workflow instance: {} error", strProcessInstanceId, e);
+ log.error("Delete workflow instance: {} error", strWorkflowInstanceId, e);
deleteFailedIdList
- .add(MessageFormat.format(Status.WORKFLOW_INSTANCE_ERROR.getMsg(), strProcessInstanceId));
+ .add(MessageFormat.format(Status.WORKFLOW_INSTANCE_ERROR.getMsg(), strWorkflowInstanceId));
}
}
}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowLineageController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowLineageController.java
index 25a0ee522e..58bb10e05b 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowLineageController.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowLineageController.java
@@ -75,10 +75,10 @@ public class WorkflowLineageController extends BaseController {
@ApiException(QUERY_WORKFLOW_LINEAGE_ERROR)
public Result<List<WorkFlowLineage>> queryWorkFlowLineageByName(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
- @RequestParam(value = "processDefinitionName", required = false) String processDefinitionName) {
- processDefinitionName = ParameterUtils.handleEscapes(processDefinitionName);
+ @RequestParam(value = "workflowDefinitionName", required = false) String workflowDefinitionName) {
+ workflowDefinitionName = ParameterUtils.handleEscapes(workflowDefinitionName);
List<WorkFlowLineage> workFlowLineages =
- workflowLineageService.queryWorkFlowLineageByName(projectCode, processDefinitionName);
+ workflowLineageService.queryWorkFlowLineageByName(projectCode, workflowDefinitionName);
return Result.success(workFlowLineages);
}
@@ -117,14 +117,14 @@ public class WorkflowLineageController extends BaseController {
*
* @param loginUser login user
* @param projectCode project codes which taskCode belong
- * @param processDefinitionCode project code which taskCode belong
+ * @param workflowDefinitionCode project code which taskCode belong
* @param taskCode task definition code
* @return Result of task can be deleted or not
*/
@Operation(summary = "verifyTaskCanDelete", description = "VERIFY_TASK_CAN_DELETE")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class)),
- @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)),
+ @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "taskCode", description = "TASK_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "123456789")),
})
@PostMapping(value = "/tasks/verify-delete")
@@ -132,11 +132,11 @@ public class WorkflowLineageController extends BaseController {
@ApiException(TASK_WITH_DEPENDENT_ERROR)
public Result