From d09a2d07c92fef9c73d93b31df3447729948f668 Mon Sep 17 00:00:00 2001 From: qiaozhanwei Date: Sun, 29 Sep 2019 15:11:10 +0800 Subject: [PATCH] rename project and doc --- docs/en_US/EasyScheduler-FAQ.md | 22 +- docs/en_US/SUMMARY.md | 2 +- docs/en_US/backend-deployment.md | 56 +- docs/en_US/backend-development.md | 4 +- docs/en_US/frontend-deployment.md | 8 +- docs/en_US/frontend-development.md | 8 +- docs/en_US/quick-start.md | 2 +- docs/en_US/system-manual.md | 4 +- docs/en_US/upgrade.md | 6 +- docs/zh_CN/EasyScheduler-FAQ.md | 22 +- docs/zh_CN/SUMMARY.md | 2 +- docs/zh_CN/任务插件开发.md | 12 +- docs/zh_CN/前端开发文档.md | 8 +- docs/zh_CN/前端部署文档.md | 8 +- docs/zh_CN/升级文档.md | 6 +- docs/zh_CN/后端开发文档.md | 4 +- docs/zh_CN/后端部署文档.md | 56 +- docs/zh_CN/快速上手.md | 2 +- docs/zh_CN/系统使用手册.md | 4 +- dolphinscheduler-alert/pom.xml | 129 ++ .../src/main/assembly/package.xml | 40 + .../dolphinscheduler/alert/AlertServer.java | 89 + .../alert/manager/EmailManager.java | 55 + .../manager/EnterpriseWeChatManager.java | 57 + .../alert/manager/MsgManager.java | 36 + .../alert/runner/AlertSender.java | 142 ++ .../alert/utils/Constants.java | 157 ++ .../alert/utils/EnterpriseWeChatUtils.java | 246 +++ .../alert/utils/ExcelUtils.java | 131 ++ .../alert/utils/FuncUtils.java | 35 + .../alert/utils/JSONUtils.java | 68 + .../alert/utils/MailUtils.java | 456 +++++ .../alert/utils/PropertyUtils.java | 193 ++ .../src/main/resources/alert.properties | 0 .../src/main/resources/alert_logback.xml | 31 + .../resources/application_alert.properties | 0 .../mail_templates/alert_mail_template.ftl | 0 .../utils/EnterpriseWeChatUtilsTest.java | 119 ++ .../alert/utils/MailUtilsTest.java | 231 +++ dolphinscheduler-api/pom.xml | 219 +++ .../src/main/assembly/package.xml | 74 + .../api/ApiApplicationServer.java | 37 + .../api/CombinedApplicationServer.java | 57 + .../api/configuration/AppConfiguration.java | 115 ++ .../ServiceModelToSwagger2MapperImpl.java | 509 +++++ .../api/configuration/SwaggerConfig.java | 55 + 
.../api/controller/AccessTokenController.java | 184 ++ .../api/controller/AlertGroupController.java | 252 +++ .../api/controller/BaseController.java | 272 +++ .../controller/DataAnalysisController.java | 199 ++ .../api/controller/DataSourceController.java | 458 +++++ .../api/controller/ExecutorController.java | 199 ++ .../api/controller/LoggerController.java | 106 + .../api/controller/LoginController.java | 147 ++ .../api/controller/MonitorController.java | 131 ++ .../ProcessDefinitionController.java | 491 +++++ .../controller/ProcessInstanceController.java | 367 ++++ .../api/controller/ProjectController.java | 304 +++ .../api/controller/QueueController.java | 200 ++ .../api/controller/ResourcesController.java | 714 +++++++ .../api/controller/SchedulerController.java | 336 ++++ .../controller/TaskInstanceController.java | 97 + .../api/controller/TaskRecordController.java | 113 ++ .../api/controller/TenantController.java | 245 +++ .../api/controller/UsersController.java | 454 +++++ .../api/controller/WorkerGroupController.java | 170 ++ .../api/dto/CommandStateCount.java | 60 + .../api/dto/DefineUserDto.java | 55 + .../api/dto/ScheduleParam.java | 72 + .../api/dto/TaskCountDto.java | 135 ++ .../api/dto/TaskStateCount.java | 50 + .../api/dto/gantt/GanttDto.java | 103 + .../dolphinscheduler/api/dto/gantt/Task.java | 138 ++ .../api/dto/treeview/Instance.java | 171 ++ .../api/dto/treeview/TreeViewDto.java | 84 + .../api/enums/ExecuteType.java | 40 + .../dolphinscheduler/api/enums/Status.java | 266 +++ .../api/interceptor/DruidStatFilter.java | 29 + .../api/interceptor/DruidStatViewServlet.java | 34 + .../interceptor/LoginHandlerInterceptor.java | 111 ++ .../dolphinscheduler/api/log/LogClient.java | 134 ++ .../api/service/AccessTokenService.java | 181 ++ .../api/service/AlertGroupService.java | 294 +++ .../api/service/BaseDAGService.java | 89 + .../api/service/BaseService.java | 130 ++ .../api/service/DataAnalysisService.java | 416 ++++ 
.../api/service/DataSourceService.java | 690 +++++++ .../api/service/ExecutorService.java | 540 +++++ .../api/service/LoggerService.java | 91 + .../api/service/MonitorService.java | 127 ++ .../api/service/ProcessDefinitionService.java | 1120 +++++++++++ .../api/service/ProcessInstanceService.java | 723 +++++++ .../api/service/ProjectService.java | 396 ++++ .../api/service/QueueService.java | 259 +++ .../api/service/ResourcesService.java | 897 +++++++++ .../api/service/SchedulerService.java | 594 ++++++ .../api/service/SessionService.java | 150 ++ .../api/service/TaskInstanceService.java | 134 ++ .../api/service/TaskRecordService.java | 85 + .../api/service/TenantService.java | 300 +++ .../api/service/UdfFuncService.java | 328 ++++ .../api/service/UsersService.java | 694 +++++++ .../api/service/WorkerGroupService.java | 156 ++ .../api/utils/CheckUtils.java | 159 ++ .../dolphinscheduler/api/utils/Constants.java | 129 ++ .../dolphinscheduler/api/utils/FileUtils.java | 79 + .../dolphinscheduler/api/utils/PageInfo.java | 117 ++ .../dolphinscheduler/api/utils/Result.java | 82 + .../api/utils/ZooKeeperState.java | 211 ++ .../api/utils/ZookeeperMonitor.java | 89 + .../src/main/resources/apiserver_logback.xml | 42 + .../src/main/resources/application.properties | 19 + .../src/main/resources/combined_logback.xml | 62 + .../main/resources/i18n/messages.properties | 0 .../resources/i18n/messages_en_US.properties | 0 .../resources/i18n/messages_zh_CN.properties | 0 .../dolphinscheduler/api/HttpClientTest.java | 157 ++ .../controller/AbstractControllerTest.java | 83 + .../DataAnalysisControllerTest.java | 97 + .../controller/DataSourceControllerTest.java | 81 + .../controller/ExecutorControllerTest.java | 74 + .../api/controller/LoggerControllerTest.java | 58 + .../api/controller/LoginControllerTest.java | 60 + .../api/controller/MonitorControllerTest.java | 82 + .../ProcessDefinitionControllerTest.java | 65 + .../ProcessInstanceControllerTest.java | 54 + 
.../api/controller/ProjectControllerTest.java | 60 + .../api/controller/QueueControllerTest.java | 133 ++ .../controller/ResourcesControllerTest.java | 80 + .../controller/SchedulerControllerTest.java | 66 + .../TaskInstanceControllerTest.java | 65 + .../api/controller/TenantControllerTest.java | 54 + .../api/controller/UsersControllerTest.java | 53 + .../api/service/DataAnalysisServiceTest.java | 51 + .../api/service/DataSourceServiceTest.java | 53 + .../api/service/ExecutorServiceTest.java | 72 + .../api/service/LoggerServiceTest.java | 52 + .../service/ProcessDefinitionServiceTest.java | 90 + .../service/ProcessInstanceServiceTest.java | 95 + .../api/service/ResourcesServiceTest.java | 53 + .../api/service/SchedulerServiceTest.java | 58 + .../api/service/SessionServiceTest.java | 52 + .../api/service/TaskInstanceServiceTest.java | 62 + .../api/service/TenantServiceTest.java | 53 + .../api/service/UdfFuncServiceTest.java | 58 + .../api/service/UsersServiceTest.java | 54 + .../api/utils/ZookeeperMonitorUtilsTest.java | 30 + .../test/resources/dao/data_source.properties | 0 dolphinscheduler-common/pom.xml | 595 ++++++ .../dolphinscheduler/common/Constants.java | 923 +++++++++ .../dolphinscheduler/common/IStoppable.java | 29 + .../common/enums/AlertStatus.java | 27 + .../common/enums/AlertType.java | 27 + .../common/enums/CommandType.java | 40 + .../common/enums/CycleEnum.java | 28 + .../common/enums/DataType.java | 35 + .../dolphinscheduler/common/enums/DbType.java | 33 + .../common/enums/DependResult.java | 31 + .../common/enums/DependStrategy.java | 29 + .../common/enums/DependentRelation.java | 25 + .../dolphinscheduler/common/enums/Direct.java | 27 + .../common/enums/ExecutionStatus.java | 101 + .../common/enums/FailureStrategy.java | 30 + .../dolphinscheduler/common/enums/Flag.java | 33 + .../common/enums/HttpCheckCondition.java | 30 + .../common/enums/HttpMethod.java | 31 + .../common/enums/HttpParametersType.java | 29 + .../common/enums/Priority.java | 31 + 
.../common/enums/ProgramType.java | 30 + .../common/enums/ReleaseState.java | 40 + .../common/enums/ResUploadType.java | 29 + .../common/enums/ResourceType.java | 27 + .../common/enums/RunMode.java | 28 + .../common/enums/SelfDependStrategy.java | 29 + .../common/enums/ServerEnum.java | 29 + .../common/enums/ShowType.java | 34 + .../common/enums/TaskDependType.java | 30 + .../common/enums/TaskRecordStatus.java | 35 + .../common/enums/TaskStateType.java | 67 + .../common/enums/TaskTimeoutStrategy.java | 29 + .../common/enums/TaskType.java | 42 + .../common/enums/UdfType.java | 27 + .../common/enums/UserType.java | 28 + .../common/enums/WarningType.java | 31 + .../common/enums/ZKNodeType.java | 15 + .../dolphinscheduler/common/graph/DAG.java | 519 +++++ .../common/job/db/BaseDataSource.java | 112 ++ .../common/job/db/ClickHouseDataSource.java | 75 + .../common/job/db/DataSourceFactory.java | 92 + .../common/job/db/HiveDataSource.java | 82 + .../common/job/db/MySQLDataSource.java | 73 + .../common/job/db/OracleDataSource.java | 75 + .../common/job/db/PostgreDataSource.java | 77 + .../common/job/db/SQLServerDataSource.java | 71 + .../common/job/db/SparkDataSource.java | 76 + .../common/model/DateInterval.java | 62 + .../common/model/DependentItem.java | 80 + .../common/model/DependentTaskModel.java | 44 + .../common/model/MasterServer.java | 130 ++ .../common/model/TaskNode.java | 324 +++ .../common/model/TaskNodeRelation.java | 67 + .../common/process/HttpProperty.java | 125 ++ .../common/process/ProcessDag.java | 85 + .../common/process/Property.java | 143 ++ .../common/process/ResourceInfo.java | 37 + .../common/queue/ITaskQueue.java | 95 + .../common/queue/TaskQueueFactory.java | 54 + .../common/queue/TaskQueueZkImpl.java | 439 +++++ .../common/shell/AbstractShell.java | 341 ++++ .../common/shell/ShellExecutor.java | 175 ++ .../common/task/AbstractParameters.java | 69 + .../common/task/IParameters.java | 38 + .../common/task/TaskTimeoutParameter.java | 81 + 
.../task/dependent/DependentParameters.java | 58 + .../common/task/flink/FlinkParameters.java | 219 +++ .../common/task/http/HttpParameters.java | 108 + .../common/task/mr/MapreduceParameters.java | 145 ++ .../task/procedure/ProcedureParameters.java | 89 + .../common/task/python/PythonParameters.java | 67 + .../common/task/shell/ShellParameters.java | 70 + .../common/task/spark/SparkParameters.java | 220 +++ .../common/task/sql/SqlBinds.java | 42 + .../common/task/sql/SqlParameters.java | 213 ++ .../common/task/sql/SqlType.java | 27 + .../task/subprocess/SubProcessParameters.java | 48 + .../common/thread/Stopper.java | 39 + .../common/thread/ThreadPoolExecutors.java | 310 +++ .../common/thread/ThreadUtils.java | 202 ++ .../dolphinscheduler/common/utils/Bytes.java | 697 +++++++ .../common/utils/CollectionUtils.java | 292 +++ .../common/utils/CommonUtils.java | 90 + .../common/utils/ConnectionUtils.java | 104 + .../common/utils/DateUtils.java | 356 ++++ .../common/utils/DependentUtils.java | 144 ++ .../common/utils/EncryptionUtils.java | 37 + .../common/utils/EnumFieldUtil.java | 37 + .../common/utils/FileUtils.java | 427 ++++ .../common/utils/HadoopUtils.java | 581 ++++++ .../common/utils/HttpUtils.java | 100 + .../common/utils/IpUtils.java | 64 + .../common/utils/JSONUtils.java | 271 +++ .../common/utils/OSUtils.java | 297 +++ .../common/utils/ParameterUtils.java | 176 ++ .../common/utils/PropertyUtils.java | 215 ++ .../common/utils/ResInfo.java | 154 ++ .../common/utils/SchemaUtils.java | 150 ++ .../common/utils/ScriptRunner.java | 317 +++ .../common/utils/TaskParametersUtils.java | 80 + .../utils/dependent/DependentDateUtils.java | 169 ++ .../utils/placeholder/BusinessTimeUtils.java | 72 + .../utils/placeholder/PlaceholderUtils.java | 99 + .../PropertyPlaceholderHelper.java | 254 +++ .../placeholder/TimePlaceholderUtils.java | 512 +++++ .../common/zk/AbstractZKClient.java | 583 ++++++ .../main/resources/common/common.properties | 0 
.../resources/common/hadoop/hadoop.properties | 0 .../src/main/resources/quartz.properties | 0 .../src/main/resources/zookeeper.properties | 0 .../common/graph/DAGTest.java | 356 ++++ .../common/os/OSUtilsTest.java | 146 ++ .../dolphinscheduler/common/os/OshiTest.java | 112 ++ .../common/queue/TaskQueueImplTest.java | 112 ++ .../common/shell/ShellExecutorTest.java | 76 + .../threadutils/ThreadPoolExecutorsTest.java | 53 + .../common/utils/CollectionUtilsTest.java | 63 + .../common/utils/CommonUtilsTest.java | 61 + .../common/utils/DateUtilsTest.java | 57 + .../common/utils/DependentUtilsTest.java | 200 ++ .../common/utils/FileUtilsTest.java | 28 + .../common/utils/HadoopUtilsTest.java | 90 + .../common/utils/HttpUtilsTest.java | 47 + .../common/utils/IpUtilsTest.java | 39 + .../common/utils/JSONUtilsTest.java | 158 ++ .../common/utils/PropertyUtilsTest.java | 31 + .../common/utils/StringTest.java | 60 + .../placeholder/TimePlaceholderUtilsTest.java | 68 + .../common/zk/StandaloneZKServerForTest.java | 100 + .../test/resources/dao/data_source.properties | 0 dolphinscheduler-dao/pom.xml | 192 ++ .../dolphinscheduler/dao/AbstractBaseDao.java | 27 + .../apache/dolphinscheduler/dao/AlertDao.java | 172 ++ .../org/apache/dolphinscheduler/dao/App.java | 11 + .../dolphinscheduler/dao/DaoFactory.java | 63 + .../dolphinscheduler/dao/MonitorDBDao.java | 145 ++ .../dolphinscheduler/dao/ProcessDao.java | 1747 +++++++++++++++++ .../dolphinscheduler/dao/TaskRecordDao.java | 299 +++ .../dao/config/MybatisPlusConfig.java | 17 + .../dao/config/YmlConfig.java | 59 + .../dao/datasource/ConnectionFactory.java | 99 + .../dao/datasource/DatabaseConfiguration.java | 0 .../dao/entity/AccessToken.java | 108 + .../dolphinscheduler/dao/entity/Alert.java | 238 +++ .../dao/entity/AlertGroup.java | 110 ++ .../dolphinscheduler/dao/entity/Command.java | 288 +++ .../dao/entity/CommandCount.java | 58 + .../dao/entity/CycleDependency.java | 93 + .../dao/entity/DataSource.java | 196 ++ 
.../dao/entity/DatasourceUser.java | 121 ++ .../dao/entity/DefinitionGroupByUser.java | 62 + .../dao/entity/Dependency.java | 62 + .../dao/entity/ErrorCommand.java | 297 +++ .../dao/entity/ExecuteStatusCount.java | 61 + .../dao/entity/MonitorRecord.java | 115 ++ .../dao/entity/ProcessData.java | 105 + .../dao/entity/ProcessDefinition.java | 385 ++++ .../dao/entity/ProcessInstance.java | 620 ++++++ .../dao/entity/ProcessInstanceMap.java | 93 + .../dolphinscheduler/dao/entity/Project.java | 211 ++ .../dao/entity/ProjectUser.java | 141 ++ .../dolphinscheduler/dao/entity/Queue.java | 106 + .../dolphinscheduler/dao/entity/Resource.java | 217 ++ .../dao/entity/ResourcesUser.java | 123 ++ .../dolphinscheduler/dao/entity/Schedule.java | 310 +++ .../dolphinscheduler/dao/entity/Session.java | 95 + .../dao/entity/TaskInstance.java | 518 +++++ .../dao/entity/TaskRecord.java | 256 +++ .../dolphinscheduler/dao/entity/Tenant.java | 167 ++ .../dolphinscheduler/dao/entity/UDFUser.java | 123 ++ .../dolphinscheduler/dao/entity/UdfFunc.java | 232 +++ .../dolphinscheduler/dao/entity/User.java | 273 +++ .../dao/entity/UserAlertGroup.java | 140 ++ .../dao/entity/WorkerGroup.java | 96 + .../dao/entity/WorkerServer.java | 127 ++ .../dao/entity/ZookeeperRecord.java | 217 ++ .../dao/mapper/AccessTokenMapper.java | 38 + .../dao/mapper/AlertGroupMapper.java | 45 + .../dao/mapper/AlertMapper.java | 31 + .../dao/mapper/CommandMapper.java | 47 + .../dao/mapper/DataSourceMapper.java | 44 + .../dao/mapper/DataSourceUserMapper.java | 31 + .../dao/mapper/ErrorCommandMapper.java | 33 + .../dao/mapper/ProcessDefinitionMapper.java | 45 + .../dao/mapper/ProcessInstanceMapMapper.java | 38 + .../dao/mapper/ProcessInstanceMapper.java | 74 + .../dao/mapper/ProjectMapper.java | 45 + .../dao/mapper/ProjectUserMapper.java | 30 + .../dao/mapper/QueueMapper.java | 37 + .../dao/mapper/ResourceMapper.java | 72 + .../dao/mapper/ResourceUserMapper.java | 28 + .../dao/mapper/ScheduleMapper.java | 39 + 
.../dao/mapper/SessionMapper.java | 29 + .../dao/mapper/TaskInstanceMapper.java | 74 + .../dao/mapper/TenantMapper.java | 34 + .../dao/mapper/UDFUserMapper.java | 30 + .../dao/mapper/UdfFuncMapper.java | 44 + .../dao/mapper/UserAlertGroupMapper.java | 34 + .../dao/mapper/UserMapper.java | 48 + .../dao/mapper/WorkerGroupMapper.java | 36 + .../dao/upgrade/DolphinSchedulerManager.java | 114 ++ .../dao/upgrade/MysqlUpgradeDao.java | 101 + .../dao/upgrade/PostgresqlUpgradeDao.java | 133 ++ .../dao/upgrade/UpgradeDao.java | 359 ++++ .../upgrade/shell/CreateDolphinScheduler.java | 45 + .../upgrade/shell/InitDolphinScheduler.java | 38 + .../shell/UpgradeDolphinScheduler.java | 43 + .../dao/utils/BeanContext.java | 51 + .../dolphinscheduler/dao/utils/DagHelper.java | 330 ++++ .../dao/utils/PropertyUtils.java | 192 ++ .../dao/utils/cron/AbstractCycle.java | 169 ++ .../dao/utils/cron/CronUtils.java | 207 ++ .../dao/utils/cron/CycleFactory.java | 211 ++ .../dao/utils/cron/CycleLinks.java | 64 + .../src/main/resources/application.yml | 0 .../AccessTokenMapper.xml | 16 + .../AlertGroupMapper.xml | 30 + .../cn.escheduler.dao.mapper/AlertMapper.xml | 9 + .../CommandMapper.xml | 26 + .../DataSourceMapper.xml | 62 + .../DataSourceUserMapper.xml | 13 + .../ErrorCommandMapper.xml | 19 + .../ProcessDefinitionMapper.xml | 56 + .../ProcessInstanceMapMapper.xml | 26 + .../ProcessInstanceMapper.xml | 123 ++ .../ProjectMapper.xml | 65 + .../ProjectUserMapper.xml | 19 + .../cn.escheduler.dao.mapper/QueueMapper.xml | 25 + .../ResourceMapper.xml | 57 + .../ResourceUserMapper.xml | 15 + .../ScheduleMapper.xml | 41 + .../SessionMapper.xml | 9 + .../TaskInstanceMapper.xml | 96 + .../cn.escheduler.dao.mapper/TenantMapper.xml | 24 + .../UDFUserMapper.xml | 12 + .../UdfFuncMapper.xml | 51 + .../UserAlertGroupMapper.xml | 20 + .../cn.escheduler.dao.mapper/UserMapper.xml | 55 + .../WorkerGroupMapper.xml | 23 + .../resources/dao/data_source.properties__ | 0 .../dao/cron/CronUtilsTest.java | 181 ++ 
.../dao/mapper/AccessTokenMapperTest.java | 94 + .../dao/mapper/AlertGroupMapperTest.java | 135 ++ .../dao/mapper/AlertMapperTest.java | 89 + .../dao/mapper/CommandMapperTest.java | 153 ++ .../dao/mapper/DataSourceMapperTest.java | 150 ++ .../dao/mapper/DataSourceUserMapperTest.java | 91 + .../dao/mapper/ErrorCommandMapperTest.java | 113 ++ .../mapper/ProcessDefinitionMapperTest.java | 197 ++ .../mapper/ProcessInstanceMapMapperTest.java | 131 ++ .../dao/mapper/ProcessInstanceMapperTest.java | 289 +++ .../dao/mapper/ProjectMapperTest.java | 56 + .../dao/mapper/ProjectUserMapperTest.java | 93 + .../dao/mapper/QueueMapperTest.java | 107 + .../dao/mapper/ResourceMapperTest.java | 52 + .../dao/mapper/ResourceUserMapperTest.java | 87 + .../dao/mapper/ScheduleMapperTest.java | 193 ++ .../dao/mapper/SessionMapperTest.java | 82 + .../dao/mapper/TaskInstanceMapperTest.java | 64 + .../dao/mapper/TenantMapperTest.java | 125 ++ .../dao/mapper/UDFUserMapperTest.java | 36 + .../dao/mapper/UdfFuncMapperTest.java | 48 + .../dao/mapper/UserAlertGroupMapperTest.java | 40 + .../dao/mapper/UserMapperTest.java | 326 +++ .../dao/mapper/WorkerGroupMapperTest.java | 113 ++ .../test/resources/dao/data_source.properties | 0 dolphinscheduler-rpc/pom.xml | 121 ++ .../rpc/LogViewServiceGrpc.java | 499 +++++ .../src/main/proto/scheduler.proto | 83 + dolphinscheduler-server/pom.xml | 127 ++ .../src/main/assembly/package.xml | 74 + .../server/master/AbstractServer.java | 85 + .../server/master/MasterServer.java | 275 +++ .../server/master/log/MasterLogFilter.java | 42 + .../runner/MasterBaseTaskExecThread.java | 132 ++ .../master/runner/MasterExecThread.java | 1042 ++++++++++ .../master/runner/MasterSchedulerThread.java | 108 + .../master/runner/MasterTaskExecThread.java | 165 ++ .../runner/SubProcessTaskExecThread.java | 178 ++ .../quartz/DruidConnectionProvider.java | 203 ++ .../server/quartz/ProcessScheduleJob.java | 135 ++ .../server/quartz/QuartzExecutors.java | 311 +++ 
.../server/rpc/LogClient.java | 122 ++ .../server/rpc/LoggerServer.java | 219 +++ .../server/utils/AlertManager.java | 240 +++ .../server/utils/FlinkArgsUtils.java | 110 ++ .../server/utils/LoggerUtils.java | 87 + .../server/utils/ParamUtils.java | 102 + .../server/utils/ProcessUtils.java | 320 +++ .../server/utils/SparkArgsUtils.java | 117 ++ .../server/utils/UDFUtils.java | 109 + .../server/worker/WorkerServer.java | 333 ++++ .../worker/log/TaskLogDiscriminator.java | 68 + .../server/worker/log/TaskLogFilter.java | 36 + .../server/worker/log/WorkerLogFilter.java | 40 + .../server/worker/runner/FetchTaskThread.java | 324 +++ .../worker/runner/TaskScheduleThread.java | 330 ++++ .../worker/task/AbstractCommandExecutor.java | 569 ++++++ .../server/worker/task/AbstractTask.java | 213 ++ .../server/worker/task/AbstractYarnTask.java | 91 + .../worker/task/PythonCommandExecutor.java | 158 ++ .../worker/task/ShellCommandExecutor.java | 96 + .../server/worker/task/TaskManager.java | 73 + .../server/worker/task/TaskProps.java | 290 +++ .../task/dependent/DependentExecute.java | 211 ++ .../worker/task/dependent/DependentTask.java | 172 ++ .../server/worker/task/flink/FlinkTask.java | 118 ++ .../server/worker/task/http/HttpTask.java | 270 +++ .../server/worker/task/mr/MapReduceTask.java | 144 ++ .../task/processdure/ProcedureTask.java | 347 ++++ .../server/worker/task/python/PythonTask.java | 140 ++ .../server/worker/task/shell/ShellTask.java | 160 ++ .../server/worker/task/spark/SparkTask.java | 109 + .../server/worker/task/sql/SqlTask.java | 447 +++++ .../server/zk/ZKMasterClient.java | 411 ++++ .../server/zk/ZKWorkerClient.java | 166 ++ .../resources/application_master.properties | 0 .../resources/application_worker.properties | 0 .../src/main/resources/master.properties | 0 .../src/main/resources/master_logback.xml | 34 + .../src/main/resources/worker.properties | 0 .../src/main/resources/worker_logback.xml | 61 + .../server/master/AlertManagerTest.java | 107 + 
.../server/master/MasterCommandTest.java | 139 ++ .../server/master/ParamsTest.java | 102 + .../server/worker/EnvFileTest.java | 64 + .../shell/ShellCommandExecutorTest.java | 103 + .../server/worker/sql/SqlExecutorTest.java | 146 ++ .../task/dependent/DependentTaskTest.java | 63 + .../server/zk/StandaloneZKServerForTest.java | 102 + .../server/zk/ZKWorkerClientTest.java | 33 + .../test/resources/dao/data_source.properties | 0 .../.babelrc | 0 .../.editorconfig | 0 {escheduler-ui => dolphinscheduler-ui}/.env | 0 .../.eslintrc | 0 .../build/combo.js | 0 .../build/config.js | 0 .../build/webpack.config.combined.js | 0 .../build/webpack.config.dev.js | 0 .../build/webpack.config.prod.js | 0 .../build/webpack.config.test.js | 0 dolphinscheduler-ui/install-escheduler-ui.sh | 219 +++ .../package.json | 0 .../src/combo/1.0.0/3rd.css | 0 .../src/combo/1.0.0/3rd.js | 0 .../src/combo/1.0.0/base.css | 0 .../src/combo/1.0.0/es5.js | 0 .../src/combo/1.0.0/local.js | 0 .../src/font/awesome/FontAwesome.otf | Bin .../src/font/awesome/font-awesome.css | 0 .../src/font/awesome/fontawesome-webfont.eot | Bin .../src/font/awesome/fontawesome-webfont.svg | 0 .../src/font/awesome/fontawesome-webfont.ttf | Bin .../src/font/awesome/fontawesome-webfont.woff | Bin .../font/awesome/fontawesome-webfont.woff2 | Bin .../src/font/demo.css | 0 .../src/font/demo_index.html | 0 .../src/font/iconfont.css | 0 .../src/font/iconfont.eot | Bin .../src/font/iconfont.js | 0 .../src/font/iconfont.svg | 0 .../src/font/iconfont.ttf | Bin .../src/font/iconfont.woff | Bin .../src/font/iconfont.woff2 | Bin .../src/images/favicon.ico | Bin .../src/js/conf/home/App.vue | 0 .../src/js/conf/home/index.js | 0 .../js/conf/home/pages/dag/_source/config.js | 0 .../src/js/conf/home/pages/dag/_source/dag.js | 0 .../js/conf/home/pages/dag/_source/dag.scss | 0 .../js/conf/home/pages/dag/_source/dag.vue | 0 .../_source/formModel/_source/selectInput.vue | 0 .../formModel/_source/timeoutAlarm.vue | 0 
.../formModel/_source/workerGroups.vue | 0 .../dag/_source/formModel/formModel.scss | 0 .../pages/dag/_source/formModel/formModel.vue | 0 .../home/pages/dag/_source/formModel/log.vue | 0 .../formModel/tasks/_source/commcon.js | 0 .../formModel/tasks/_source/datasource.vue | 0 .../tasks/_source/dependItemList.vue | 0 .../formModel/tasks/_source/httpParams.vue | 0 .../formModel/tasks/_source/listBox.vue | 0 .../formModel/tasks/_source/localParams.vue | 0 .../formModel/tasks/_source/resources.vue | 0 .../formModel/tasks/_source/sqlType.vue | 0 .../formModel/tasks/_source/statementList.vue | 0 .../_source/formModel/tasks/_source/udfs.vue | 0 .../dag/_source/formModel/tasks/dependent.vue | 0 .../dag/_source/formModel/tasks/flink.vue | 0 .../dag/_source/formModel/tasks/http.vue | 0 .../pages/dag/_source/formModel/tasks/mr.vue | 0 .../dag/_source/formModel/tasks/procedure.vue | 0 .../dag/_source/formModel/tasks/python.vue | 0 .../dag/_source/formModel/tasks/shell.vue | 0 .../dag/_source/formModel/tasks/spark.vue | 0 .../pages/dag/_source/formModel/tasks/sql.vue | 0 .../_source/formModel/tasks/sub_process.vue | 0 .../pages/dag/_source/jumpAffirm/index.js | 0 .../dag/_source/jumpAffirm/jumpAffirm.vue | 0 .../pages/dag/_source/plugIn/downChart.js | 0 .../home/pages/dag/_source/plugIn/dragZoom.js | 0 .../pages/dag/_source/plugIn/jsPlumbHandle.js | 0 .../home/pages/dag/_source/plugIn/util.js | 0 .../pages/dag/_source/startingParam/index.vue | 0 .../dag/_source/udp/_source/selectTenant.vue | 0 .../conf/home/pages/dag/_source/udp/udp.vue | 0 .../home/pages/dag/_source/variable/index.vue | 0 .../dag/_source/variable/variablesView.vue | 0 .../conf/home/pages/dag/definitionDetails.vue | 0 .../src/js/conf/home/pages/dag/img/dag_bg.png | Bin .../conf/home/pages/dag/img/toobar_HTTP.png | Bin .../conf/home/pages/dag/img/toobar_flink.svg | 0 .../home/pages/dag/img/toolbar_DEPENDENT.png | Bin .../js/conf/home/pages/dag/img/toolbar_MR.png | Bin .../home/pages/dag/img/toolbar_PROCEDURE.png 
| Bin .../home/pages/dag/img/toolbar_PYTHON.png | Bin .../conf/home/pages/dag/img/toolbar_SHELL.png | Bin .../conf/home/pages/dag/img/toolbar_SPARK.png | Bin .../conf/home/pages/dag/img/toolbar_SQL.png | Bin .../pages/dag/img/toolbar_SUB_PROCESS.png | Bin .../src/js/conf/home/pages/dag/index.vue | 0 .../conf/home/pages/dag/instanceDetails.vue | 0 .../js/conf/home/pages/datasource/index.vue | 0 .../pages/list/_source/createDataSource.vue | 0 .../datasource/pages/list/_source/list.vue | 0 .../pages/datasource/pages/list/index.vue | 0 .../src/js/conf/home/pages/home/index.vue | 0 .../src/js/conf/home/pages/monitor/index.vue | 0 .../monitor/pages/servers/_source/gauge.vue | 0 .../pages/servers/_source/gaugeOption.js | 0 .../pages/servers/_source/zookeeperList.vue | 0 .../pages/monitor/pages/servers/alert.vue | 0 .../pages/monitor/pages/servers/apiserver.vue | 0 .../pages/monitor/pages/servers/master.vue | 0 .../pages/monitor/pages/servers/mysql.vue | 0 .../pages/monitor/pages/servers/rpcserver.vue | 0 .../pages/monitor/pages/servers/servers.scss | 0 .../monitor/pages/servers/statistics.vue | 0 .../pages/monitor/pages/servers/worker.vue | 0 .../pages/monitor/pages/servers/zookeeper.vue | 0 .../src/js/conf/home/pages/projects/index.vue | 0 .../_source/instanceConditions/common.js | 0 .../_source/instanceConditions/index.vue | 0 .../taskRecordList/_source/conditions.vue | 0 .../_source/taskRecordList/_source/list.vue | 0 .../pages/_source/taskRecordList/index.vue | 0 .../pages/projects/pages/definition/index.vue | 0 .../pages/definition/pages/create/index.vue | 0 .../pages/definition/pages/details/index.vue | 0 .../definition/pages/list/_source/email.vue | 0 .../definition/pages/list/_source/list.vue | 0 .../definition/pages/list/_source/start.vue | 0 .../definition/pages/list/_source/timing.vue | 0 .../definition/pages/list/_source/util.js | 0 .../pages/definition/pages/list/index.vue | 0 .../definition/pages/tree/_source/tree.js | 0 
.../definition/pages/tree/_source/util.js | 0 .../definition/pages/tree/img/dag_bg.png | Bin .../pages/definition/pages/tree/index.vue | 0 .../pages/definition/timing/_source/list.vue | 0 .../pages/definition/timing/index.vue | 0 .../pages/historyTaskRecord/index.vue | 0 .../pages/index/_source/chartConfig.js | 0 .../pages/index/_source/commandStateCount.vue | 0 .../pages/index/_source/defineUserCount.vue | 0 .../pages/index/_source/processStateCount.vue | 0 .../pages/index/_source/queueCount.vue | 0 .../pages/index/_source/taskCtatusCount.vue | 0 .../home/pages/projects/pages/index/index.vue | 0 .../pages/projects/pages/instance/index.vue | 0 .../pages/instance/pages/details/index.vue | 0 .../instance/pages/gantt/_source/gantt.js | 0 .../pages/instance/pages/gantt/img/dag_bg.png | Bin .../pages/instance/pages/gantt/index.vue | 0 .../instance/pages/list/_source/list.vue | 0 .../pages/instance/pages/list/index.vue | 0 .../pages/list/_source/createProject.vue | 0 .../projects/pages/list/_source/list.vue | 0 .../home/pages/projects/pages/list/index.vue | 0 .../pages/taskInstance/_source/list.vue | 0 .../projects/pages/taskInstance/index.vue | 0 .../pages/projects/pages/taskRecord/index.vue | 0 .../pages/projects/pages/timing/index.vue | 0 .../src/js/conf/home/pages/resource/index.vue | 0 .../home/pages/resource/pages/file/index.vue | 0 .../pages/file/pages/_source/codemirror.js | 0 .../pages/file/pages/_source/common.js | 0 .../pages/file/pages/create/index.vue | 0 .../file/pages/details/_source/down_error.png | Bin .../file/pages/details/_source/noType.vue | 0 .../pages/file/pages/details/_source/utils.js | 0 .../pages/file/pages/details/index.vue | 0 .../resource/pages/file/pages/edit/index.vue | 0 .../pages/file/pages/list/_source/list.vue | 0 .../pages/file/pages/list/_source/rename.vue | 0 .../resource/pages/file/pages/list/index.vue | 0 .../home/pages/resource/pages/udf/index.vue | 0 .../udf/pages/function/_source/createUdf.vue | 0 
.../pages/udf/pages/function/_source/list.vue | 0 .../pages/udf/pages/function/index.vue | 0 .../pages/udf/pages/resource/_source/list.vue | 0 .../udf/pages/resource/_source/rename.vue | 0 .../pages/udf/pages/resource/index.vue | 0 .../src/js/conf/home/pages/security/index.vue | 0 .../pages/queue/_source/createQueue.vue | 0 .../security/pages/queue/_source/list.vue | 0 .../home/pages/security/pages/queue/index.vue | 0 .../pages/tenement/_source/createTenement.vue | 0 .../security/pages/tenement/_source/list.vue | 0 .../pages/security/pages/tenement/index.vue | 0 .../home/pages/security/pages/token/index.vue | 0 .../pages/users/_source/createUser.vue | 0 .../security/pages/users/_source/list.vue | 0 .../home/pages/security/pages/users/index.vue | 0 .../warningGroups/_source/createWarning.vue | 0 .../pages/warningGroups/_source/list.vue | 0 .../security/pages/warningGroups/index.vue | 0 .../workerGroups/_source/createWorker.vue | 0 .../pages/workerGroups/_source/list.vue | 0 .../security/pages/workerGroups/index.vue | 0 .../src/js/conf/home/pages/user/index.vue | 0 .../pages/user/pages/account/_source/info.vue | 0 .../home/pages/user/pages/account/index.vue | 0 .../user/pages/password/_source/info.vue | 0 .../home/pages/user/pages/password/index.vue | 0 .../user/pages/token/_source/createToken.vue | 0 .../pages/user/pages/token/_source/list.vue | 0 .../home/pages/user/pages/token/index.vue | 0 .../src/js/conf/home/router/index.js | 0 .../src/js/conf/home/store/dag/actions.js | 0 .../src/js/conf/home/store/dag/getters.js | 0 .../src/js/conf/home/store/dag/index.js | 0 .../src/js/conf/home/store/dag/mutations.js | 0 .../src/js/conf/home/store/dag/state.js | 0 .../js/conf/home/store/datasource/actions.js | 0 .../js/conf/home/store/datasource/getters.js | 0 .../js/conf/home/store/datasource/index.js | 0 .../conf/home/store/datasource/mutations.js | 0 .../js/conf/home/store/datasource/state.js | 0 .../src/js/conf/home/store/index.js | 0 
.../src/js/conf/home/store/monitor/actions.js | 0 .../src/js/conf/home/store/monitor/getters.js | 0 .../src/js/conf/home/store/monitor/index.js | 0 .../js/conf/home/store/monitor/mutations.js | 0 .../src/js/conf/home/store/monitor/state.js | 0 .../js/conf/home/store/projects/actions.js | 0 .../js/conf/home/store/projects/getters.js | 0 .../src/js/conf/home/store/projects/index.js | 0 .../js/conf/home/store/projects/mutations.js | 0 .../src/js/conf/home/store/projects/state.js | 0 .../js/conf/home/store/resource/actions.js | 0 .../js/conf/home/store/resource/getters.js | 0 .../src/js/conf/home/store/resource/index.js | 0 .../js/conf/home/store/resource/mutations.js | 0 .../src/js/conf/home/store/resource/state.js | 0 .../js/conf/home/store/security/actions.js | 0 .../js/conf/home/store/security/getters.js | 0 .../src/js/conf/home/store/security/index.js | 0 .../js/conf/home/store/security/mutations.js | 0 .../src/js/conf/home/store/security/state.js | 0 .../src/js/conf/home/store/user/actions.js | 0 .../src/js/conf/home/store/user/getters.js | 0 .../src/js/conf/home/store/user/index.js | 0 .../src/js/conf/home/store/user/mutations.js | 0 .../src/js/conf/home/store/user/state.js | 0 .../src/js/conf/login/App.vue | 0 .../src/js/conf/login/img/login-logo.png | Bin .../src/js/conf/login/index.js | 0 .../components/conditions/conditions.vue | 0 .../fileUpdate/definitionUpdate.vue | 0 .../components/fileUpdate/fileUpdate.vue | 0 .../components/fileUpdate/udfUpdate.vue | 0 .../js/module/components/layout/layout.vue | 0 .../module/components/listBoxF/listBoxF.vue | 0 .../listConstruction/listConstruction.vue | 0 .../src/js/module/components/nav/logo.png | Bin .../src/js/module/components/nav/m_logo.png | Bin .../src/js/module/components/nav/nav.vue | 0 .../components/noData/images/errorTip.png | Bin .../js/module/components/noData/noData.vue | 0 .../src/js/module/components/popup/popup.vue | 0 .../module/components/priority/priority.vue | 0 
.../components/progressBar/progressBar.vue | 0 .../secondaryMenu/_source/close.png | Bin .../components/secondaryMenu/_source/menu.js | 0 .../components/secondaryMenu/_source/open.png | Bin .../secondaryMenu/secondaryMenu.vue | 0 .../src/js/module/components/spin/spin.vue | 0 .../components/tooltipsJSON/tooltipsJSON.vue | 0 .../module/components/transfer/transfer.vue | 0 .../src/js/module/download/index.js | 0 .../src/js/module/echarts/themeData.json | 0 .../src/js/module/filter/filter.js | 0 .../src/js/module/filter/formatDate.js | 0 .../src/js/module/i18n/config.js | 0 .../src/js/module/i18n/index.js | 0 .../src/js/module/i18n/locale/en_US.js | 0 .../src/js/module/i18n/locale/zh_CN.js | 0 .../src/js/module/io/index.js | 0 .../src/js/module/mixin/disabledState.js | 0 .../src/js/module/mixin/listUrlParamHandle.js | 0 .../src/js/module/permissions/index.js | 0 .../src/js/module/util/clickoutside.js | 0 .../src/js/module/util/cookie.js | 0 .../src/js/module/util/index.js | 0 .../src/js/module/util/localStorage.js | 0 .../src/js/module/util/routerUtil.js | 0 .../src/js/module/util/util.js | 0 .../src/lib/@analysys/ana-charts/README.md | 0 .../lib/@analysys/ana-charts/build/config.js | 0 .../ana-charts/build/webpack.config.prod.js | 0 .../lib/@analysys/ana-charts/dist/index.js | 0 .../@analysys/ana-charts/dist/index.js.map | 0 .../lib/@analysys/ana-charts/example/app.vue | 0 .../@analysys/ana-charts/example/index.html | 0 .../lib/@analysys/ana-charts/example/index.js | 0 .../@analysys/ana-charts/example/mock/data.js | 0 .../ana-charts/example/mock/theme.json | 0 .../ana-charts/example/packages/bar.vue | 0 .../ana-charts/example/packages/funnel.vue | 0 .../ana-charts/example/packages/line.vue | 0 .../ana-charts/example/packages/pie.vue | 0 .../ana-charts/example/packages/radar.vue | 0 .../ana-charts/example/packages/scatter.vue | 0 .../ana-charts/example/router/index.js | 0 .../ana-charts/example/styles/main.scss | 0 .../src/lib/@analysys/ana-charts/package.json | 0 
.../@analysys/ana-charts/postcss.config.js | 0 .../src/lib/@analysys/ans-ui/README.md | 0 .../lib/@analysys/ans-ui/lib/ans-ui.min.css | 0 .../lib/@analysys/ans-ui/lib/ans-ui.min.js | 0 .../@analysys/ans-ui/lib/font/iconfont.eot | Bin .../@analysys/ans-ui/lib/font/iconfont.svg | 0 .../@analysys/ans-ui/lib/font/iconfont.ttf | Bin .../@analysys/ans-ui/lib/font/iconfont.woff | Bin .../src/lib/@analysys/ans-ui/lib/locale/en.js | 0 .../lib/@analysys/ans-ui/lib/locale/en.js.map | 0 .../lib/@analysys/ans-ui/lib/locale/zh-CN.js | 0 .../@analysys/ans-ui/lib/locale/zh-CN.js.map | 0 .../src/lib/@analysys/ans-ui/package.json | 0 .../ans-ui/packages/vue-box/README.md | 0 .../ans-ui/packages/vue-box/example/app.vue | 0 .../packages/vue-box/example/index.html | 0 .../ans-ui/packages/vue-box/example/index.js | 0 .../ans-ui/packages/vue-box/src/index.js | 0 .../packages/vue-box/src/source/base/Box.vue | 0 .../vue-box/src/source/base/BoxManager.vue | 0 .../packages/vue-box/src/source/base/index.js | 0 .../src/source/layer/message/message.js | 0 .../vue-box/src/source/layer/modal/modal.js | 0 .../vue-box/src/source/layer/notice/notice.js | 0 .../ans-ui/packages/vue-button/README.md | 0 .../packages/vue-button/example/app.vue | 0 .../packages/vue-button/example/index.html | 0 .../packages/vue-button/example/index.js | 0 .../ans-ui/packages/vue-button/src/index.js | 0 .../packages/vue-button/src/source/Button.vue | 0 .../vue-button/src/source/ButtonGroup.vue | 0 .../ans-ui/packages/vue-cascader/README.md | 0 .../packages/vue-cascader/example/app.vue | 0 .../packages/vue-cascader/example/data.js | 0 .../packages/vue-cascader/example/index.html | 0 .../packages/vue-cascader/example/index.js | 0 .../ans-ui/packages/vue-cascader/src/index.js | 0 .../vue-cascader/src/source/Cascader.vue | 0 .../vue-cascader/src/source/Caspanel.vue | 0 .../ans-ui/packages/vue-checkbox/README.md | 0 .../packages/vue-checkbox/example/app.vue | 0 .../packages/vue-checkbox/example/index.html | 0 
.../packages/vue-checkbox/example/index.js | 0 .../ans-ui/packages/vue-checkbox/src/index.js | 0 .../vue-checkbox/src/source/Checkbox.vue | 0 .../vue-checkbox/src/source/CheckboxGroup.vue | 0 .../ans-ui/packages/vue-datepicker/README.md | 0 .../packages/vue-datepicker/example/app.vue | 0 .../vue-datepicker/example/index.html | 0 .../packages/vue-datepicker/example/index.js | 0 .../packages/vue-datepicker/src/index.js | 0 .../src/source/base/confirm.vue | 0 .../vue-datepicker/src/source/base/day.vue | 0 .../vue-datepicker/src/source/base/time.vue | 0 .../vue-datepicker/src/source/base/years.vue | 0 .../vue-datepicker/src/source/datepicker.vue | 0 .../vue-datepicker/src/source/panel/date.vue | 0 .../src/source/panel/daterange.vue | 0 .../vue-datepicker/src/source/panel/month.vue | 0 .../vue-datepicker/src/source/panel/time.vue | 0 .../vue-datepicker/src/source/panel/year.vue | 0 .../vue-datepicker/src/source/util/date.js | 0 .../vue-datepicker/src/source/util/isType.js | 0 .../vue-datepicker/src/source/util/isValid.js | 0 .../vue-datepicker/src/source/util/ishms.js | 0 .../vue-datepicker/src/source/util/todate.js | 0 .../ans-ui/packages/vue-drawer/README.md | 0 .../packages/vue-drawer/example/app.vue | 0 .../packages/vue-drawer/example/index.html | 0 .../packages/vue-drawer/example/index.js | 0 .../packages/vue-drawer/example/test.vue | 0 .../ans-ui/packages/vue-drawer/src/index.js | 0 .../packages/vue-drawer/src/source/drawer.js | 0 .../ans-ui/packages/vue-form/README.md | 0 .../ans-ui/packages/vue-form/example/app.vue | 0 .../packages/vue-form/example/index.html | 0 .../ans-ui/packages/vue-form/example/index.js | 0 .../ans-ui/packages/vue-form/src/index.js | 0 .../packages/vue-form/src/source/Form.vue | 0 .../packages/vue-form/src/source/FormItem.vue | 0 .../ans-ui/packages/vue-input/README.md | 0 .../ans-ui/packages/vue-input/example/app.vue | 0 .../packages/vue-input/example/index.html | 0 .../packages/vue-input/example/index.js | 0 
.../ans-ui/packages/vue-input/src/index.js | 0 .../packages/vue-input/src/source/Input.vue | 0 .../src/source/util/calcTextareaHeight.js | 0 .../ans-ui/packages/vue-pagination/README.md | 0 .../packages/vue-pagination/example/app.vue | 0 .../vue-pagination/example/index.html | 0 .../packages/vue-pagination/example/index.js | 0 .../packages/vue-pagination/src/index.js | 0 .../vue-pagination/src/source/Page.vue | 0 .../ans-ui/packages/vue-poptip/README.md | 0 .../packages/vue-poptip/example/app.vue | 0 .../packages/vue-poptip/example/index.html | 0 .../packages/vue-poptip/example/index.js | 0 .../ans-ui/packages/vue-poptip/src/index.js | 0 .../packages/vue-poptip/src/source/Poptip.vue | 0 .../vue-poptip/src/source/directive.js | 0 .../ans-ui/packages/vue-progress/README.md | 0 .../packages/vue-progress/example/app.vue | 0 .../packages/vue-progress/example/index.html | 0 .../packages/vue-progress/example/index.js | 0 .../ans-ui/packages/vue-progress/src/index.js | 0 .../vue-progress/src/source/Progress.vue | 0 .../ans-ui/packages/vue-radio/README.md | 0 .../ans-ui/packages/vue-radio/example/app.vue | 0 .../packages/vue-radio/example/index.html | 0 .../packages/vue-radio/example/index.js | 0 .../ans-ui/packages/vue-radio/src/index.js | 0 .../packages/vue-radio/src/source/Radio.vue | 0 .../vue-radio/src/source/RadioGroup.vue | 0 .../ans-ui/packages/vue-scroller/README.md | 0 .../packages/vue-scroller/example/app.vue | 0 .../packages/vue-scroller/example/index.html | 0 .../packages/vue-scroller/example/index.js | 0 .../ans-ui/packages/vue-scroller/src/index.js | 0 .../src/source/HorizontalScrollbar.vue | 0 .../vue-scroller/src/source/Scroller.vue | 0 .../src/source/VerticalScrollbar.vue | 0 .../ans-ui/packages/vue-select/README.md | 0 .../packages/vue-select/example/app.vue | 0 .../packages/vue-select/example/async.vue | 0 .../packages/vue-select/example/dynamic.vue | 0 .../packages/vue-select/example/index.html | 0 .../packages/vue-select/example/index.js | 0 
.../vue-select/example/navigation.vue | 0 .../ans-ui/packages/vue-select/src/index.js | 0 .../packages/vue-select/src/source/Option.vue | 0 .../vue-select/src/source/OptionGroup.vue | 0 .../packages/vue-select/src/source/Select.vue | 0 .../vue-select/src/source/SelectDropdown.vue | 0 .../ans-ui/packages/vue-spin/README.md | 0 .../ans-ui/packages/vue-spin/example/app.vue | 0 .../packages/vue-spin/example/index.html | 0 .../ans-ui/packages/vue-spin/example/index.js | 0 .../ans-ui/packages/vue-spin/src/index.js | 0 .../packages/vue-spin/src/source/Spin.vue | 0 .../packages/vue-spin/src/source/directive.js | 0 .../packages/vue-spin/src/source/service.js | 0 .../ans-ui/packages/vue-switch/README.md | 0 .../packages/vue-switch/example/app.vue | 0 .../packages/vue-switch/example/index.html | 0 .../packages/vue-switch/example/index.js | 0 .../ans-ui/packages/vue-switch/src/index.js | 0 .../packages/vue-switch/src/source/Switch.vue | 0 .../ans-ui/packages/vue-table/README.md | 0 .../ans-ui/packages/vue-table/example/app.vue | 0 .../packages/vue-table/example/array.vue | 0 .../packages/vue-table/example/dynamic.vue | 0 .../packages/vue-table/example/index.html | 0 .../packages/vue-table/example/index.js | 0 .../packages/vue-table/example/indexs.json | 0 .../packages/vue-table/example/paging.vue | 0 .../packages/vue-table/example/restrict.vue | 0 .../packages/vue-table/example/sort.vue | 0 .../packages/vue-table/example/tree.vue | 0 .../ans-ui/packages/vue-table/src/index.js | 0 .../packages/vue-table/src/source/Table.vue | 0 .../vue-table/src/source/TableBody.vue | 0 .../vue-table/src/source/TableColumn.vue | 0 .../vue-table/src/source/TableHeader.vue | 0 .../packages/vue-table/src/source/TableTd.vue | 0 .../packages/vue-table/src/source/TableTh.vue | 0 .../vue-table/src/source/cellRenderer.js | 0 .../packages/vue-table/src/source/layout.js | 0 .../vue-table/src/source/layoutObserver.js | 0 .../packages/vue-table/src/source/store.js | 0 
.../ans-ui/packages/vue-timepicker/README.md | 0 .../packages/vue-timepicker/example/app.vue | 0 .../vue-timepicker/example/index.html | 0 .../packages/vue-timepicker/example/index.js | 0 .../packages/vue-timepicker/src/index.js | 0 .../vue-timepicker/src/source/Timepicker.vue | 0 .../ans-ui/packages/vue-tooltip/README.md | 0 .../packages/vue-tooltip/example/app.vue | 0 .../packages/vue-tooltip/example/index.html | 0 .../packages/vue-tooltip/example/index.js | 0 .../ans-ui/packages/vue-tooltip/src/index.js | 0 .../vue-tooltip/src/source/Tooltip.vue | 0 .../vue-tooltip/src/source/directive.js | 0 .../vue-tooltip/src/source/factory.js | 0 .../src/lib/@analysys/ans-ui/src/index.js | 0 .../lib/@analysys/ans-ui/src/locale/format.js | 0 .../lib/@analysys/ans-ui/src/locale/index.js | 0 .../@analysys/ans-ui/src/locale/lang/en.js | 0 .../@analysys/ans-ui/src/locale/lang/zh-CN.js | 0 .../src/style/animation/attentionSeekers.scss | 0 .../style/animation/bouncingEntrances.scss | 0 .../src/style/animation/bouncingExits.scss | 0 .../src/style/animation/fadingEntrances.scss | 0 .../src/style/animation/fadingExits.scss | 0 .../ans-ui/src/style/animation/flippers.scss | 0 .../ans-ui/src/style/animation/index.scss | 0 .../src/style/animation/lightspeed.scss | 0 .../style/animation/rotatingEntrances.scss | 0 .../src/style/animation/rotatingExits.scss | 0 .../src/style/animation/slidingEntrances.scss | 0 .../src/style/animation/slidingExits.scss | 0 .../ans-ui/src/style/animation/specials.scss | 0 .../src/style/animation/zoomEntrances.scss | 0 .../ans-ui/src/style/animation/zoomExits.scss | 0 .../@analysys/ans-ui/src/style/common.scss | 0 .../ans-ui/src/style/components/box/box.scss | 0 .../src/style/components/box/message.scss | 0 .../src/style/components/box/modal.scss | 0 .../src/style/components/box/notice.scss | 0 .../src/style/components/button/button.scss | 0 .../src/style/components/button/mixin.scss | 0 .../style/components/cascader/cascader.scss | 0 
.../style/components/checkbox/checkbox.scss | 0 .../components/datepicker/datepicker.scss | 0 .../src/style/components/drawer/drawer.scss | 0 .../src/style/components/form/form.scss | 0 .../ans-ui/src/style/components/index.scss | 0 .../src/style/components/input/input.scss | 0 .../components/pagination/pagination.scss | 0 .../src/style/components/poptip/poptip.scss | 0 .../style/components/progress/progress.scss | 0 .../src/style/components/radio/radio.scss | 0 .../style/components/scroller/scroller.scss | 0 .../src/style/components/select/select.scss | 0 .../src/style/components/spin/spin.scss | 0 .../src/style/components/switch/switch.scss | 0 .../src/style/components/table/table.scss | 0 .../src/style/components/tooltip/tooltip.scss | 0 .../lib/@analysys/ans-ui/src/style/font.scss | 0 .../ans-ui/src/style/font/iconfont.eot | Bin .../ans-ui/src/style/font/iconfont.svg | 0 .../ans-ui/src/style/font/iconfont.ttf | Bin .../ans-ui/src/style/font/iconfont.woff | Bin .../lib/@analysys/ans-ui/src/style/index.scss | 0 .../lib/@analysys/ans-ui/src/style/vars.scss | 0 .../lib/@analysys/ans-ui/src/util/assist.js | 0 .../@analysys/ans-ui/src/util/constants.js | 0 .../ans-ui/src/util/customRenderer.js | 0 .../src/util/directives/clickOutside.js | 0 .../ans-ui/src/util/directives/index.js | 0 .../ans-ui/src/util/directives/mousewheel.js | 0 .../ans-ui/src/util/dom/animatedScroll.js | 0 .../@analysys/ans-ui/src/util/dom/class.js | 0 .../@analysys/ans-ui/src/util/dom/index.js | 0 .../ans-ui/src/util/dom/limitedLoop.js | 0 .../ans-ui/src/util/dom/scrollIntoView.js | 0 .../ans-ui/src/util/dom/scrollbarWidth.js | 0 .../@analysys/ans-ui/src/util/dom/style.js | 0 .../lib/@analysys/ans-ui/src/util/event.js | 0 .../lib/@analysys/ans-ui/src/util/index.js | 0 .../src/lib/@analysys/ans-ui/src/util/lang.js | 0 .../ans-ui/src/util/mixins/emitter.js | 0 .../@analysys/ans-ui/src/util/mixins/index.js | 0 .../ans-ui/src/util/mixins/locale.js | 0 .../ans-ui/src/util/mixins/popper.js | 0 
.../src/lib/@fedor/io/README.md | 0 .../src/lib/@fedor/io/dist/io.esm.js | 0 .../src/lib/@fedor/io/dist/io.js | 0 .../src/lib/@fedor/io/dist/io.min.js | 0 .../src/lib/@fedor/io/package.json | 0 .../progress-webpack-plugin/lib/index.js | 0 .../progress-webpack-plugin/package.json | 0 .../.circleci/config.yml | 0 .../component-compiler-utils/.prettierrc.js | 0 .../component-compiler-utils/CHANGELOG.md | 0 .../@vue/component-compiler-utils/README.md | 0 .../dist/compileStyle.d.ts | 0 .../dist/compileStyle.js | 0 .../dist/compileTemplate.d.ts | 0 .../dist/compileTemplate.js | 0 .../component-compiler-utils/dist/index.d.ts | 0 .../component-compiler-utils/dist/index.js | 0 .../component-compiler-utils/dist/parse.d.ts | 0 .../component-compiler-utils/dist/parse.js | 0 .../dist/stylePlugins/scoped.d.ts | 0 .../dist/stylePlugins/scoped.js | 0 .../dist/stylePlugins/trim.d.ts | 0 .../dist/stylePlugins/trim.js | 0 .../dist/styleProcessors/index.d.ts | 0 .../dist/styleProcessors/index.js | 0 .../templateCompilerModules/assetUrl.d.ts | 0 .../dist/templateCompilerModules/assetUrl.js | 0 .../dist/templateCompilerModules/srcset.d.ts | 0 .../dist/templateCompilerModules/srcset.js | 0 .../dist/templateCompilerModules/utils.d.ts | 0 .../dist/templateCompilerModules/utils.js | 0 .../component-compiler-utils/dist/types.d.ts | 0 .../component-compiler-utils/dist/types.js | 0 .../lib/compileStyle.ts | 0 .../lib/compileTemplate.ts | 0 .../component-compiler-utils/lib/index.ts | 0 .../component-compiler-utils/lib/parse.ts | 0 .../lib/stylePlugins/scoped.ts | 0 .../lib/stylePlugins/trim.ts | 0 .../lib/styleProcessors/index.ts | 0 .../lib/templateCompilerModules/assetUrl.ts | 0 .../lib/templateCompilerModules/srcset.ts | 0 .../lib/templateCompilerModules/utils.ts | 0 .../component-compiler-utils/lib/types.ts | 0 .../component-compiler-utils/package.json | 0 .../src/lib/@vue/crontab/README.md | 0 .../src/lib/@vue/crontab/build/config.js | 0 .../@vue/crontab/build/webpack.config.prod.js | 0 
.../src/lib/@vue/crontab/dist/index.css | 0 .../src/lib/@vue/crontab/dist/index.js | 0 .../src/lib/@vue/crontab/dist/index.js.map | 0 .../src/lib/@vue/crontab/example/app.vue | 0 .../src/lib/@vue/crontab/example/index.html | 0 .../src/lib/@vue/crontab/example/index.js | 0 .../src/lib/@vue/crontab/package.json | 0 .../src/lib/@vue/crontab/postcss.config.js | 0 .../src/lib/external/config.js | 0 .../src/lib/external/email.js | 0 .../src/sass/common/_animation.scss | 0 .../src/sass/common/_font.scss | 0 .../src/sass/common/_mixin.scss | 0 .../src/sass/common/_normalize.scss | 0 .../src/sass/common/_scrollbar.scss | 0 .../src/sass/common/_table.scss | 0 .../src/sass/common/index.scss | 0 .../src/sass/conf/home/index.scss | 0 .../src/sass/conf/login/index.scss | 0 .../src/view/common/meta.inc | 0 .../src/view/common/outro.inc | 0 .../gitbook-plugin-livereload/plugin.js | 0 .../view/docs/zh_CN/_book/gitbook/gitbook.js | 0 .../view/docs/zh_CN/_book/gitbook/theme.js | 0 .../docs/zh_CN/_book/images/addtenant.png | Bin .../docs/zh_CN/_book/images/auth_project.png | Bin .../docs/zh_CN/_book/images/auth_user.png | Bin .../zh_CN/_book/images/complement_data.png | Bin .../zh_CN/_book/images/definition_create.png | Bin .../zh_CN/_book/images/definition_edit.png | Bin .../zh_CN/_book/images/definition_list.png | Bin .../docs/zh_CN/_book/images/dependent_1.jpg | Bin .../docs/zh_CN/_book/images/dependent_2.jpg | Bin .../zh_CN/_book/images/dependent_edit.png | Bin .../zh_CN/_book/images/dependent_edit2.png | Bin .../zh_CN/_book/images/dependent_edit3.png | Bin .../zh_CN/_book/images/dependent_edit4.png | Bin .../docs/zh_CN/_book/images/file_create.png | Bin .../docs/zh_CN/_book/images/file_detail.png | Bin .../docs/zh_CN/_book/images/file_rename.png | Bin .../docs/zh_CN/_book/images/file_upload.png | Bin .../docs/zh_CN/_book/images/flink_edit.png | Bin .../view/docs/zh_CN/_book/images/gantt.png | Bin .../zh_CN/_book/images/global_parameter.png | Bin 
.../docs/zh_CN/_book/images/hive_edit.png | Bin .../docs/zh_CN/_book/images/hive_edit2.png | Bin .../zh_CN/_book/images/local_parameter.png | Bin .../view/docs/zh_CN/_book/images/login.png | Bin .../view/docs/zh_CN/_book/images/logout.png | Bin .../docs/zh_CN/_book/images/mail_edit.png | Bin .../view/docs/zh_CN/_book/images/master.png | Bin .../view/docs/zh_CN/_book/images/mr_edit.png | Bin .../view/docs/zh_CN/_book/images/mr_java.png | Bin .../docs/zh_CN/_book/images/mysql_edit.png | Bin .../zh_CN/_book/images/postgresql_edit.png | Bin .../zh_CN/_book/images/procedure_edit.png | Bin .../zh_CN/_book/images/process_instance.png | Bin .../_book/images/process_instance_edit.png | Bin .../docs/zh_CN/_book/images/project_edit.png | Bin .../docs/zh_CN/_book/images/project_index.png | Bin .../docs/zh_CN/_book/images/python_edit.png | Bin .../docs/zh_CN/_book/images/scheduler.png | Bin .../docs/zh_CN/_book/images/scheduler2.png | Bin .../docs/zh_CN/_book/images/shell_edit.png | Bin .../zh_CN/_book/images/spark_datesource.png | Bin .../docs/zh_CN/_book/images/spark_edit.png | Bin .../view/docs/zh_CN/_book/images/sql_edit.png | Bin .../zh_CN/_book/images/start_from_current.png | Bin .../_book/images/start_from_current2.png | Bin .../docs/zh_CN/_book/images/start_process.png | Bin .../zh_CN/_book/images/subprocess_edit.png | Bin .../docs/zh_CN/_book/images/task_history.png | Bin .../docs/zh_CN/_book/images/task_list.png | Bin .../view/docs/zh_CN/_book/images/task_log.png | Bin .../docs/zh_CN/_book/images/task_log2.png | Bin .../zh_CN/_book/images/toolbar_DEPENDENT.png | Bin .../docs/zh_CN/_book/images/toolbar_MR.png | Bin .../zh_CN/_book/images/toolbar_PROCEDURE.png | Bin .../zh_CN/_book/images/toolbar_PYTHON.png | Bin .../docs/zh_CN/_book/images/toolbar_SHELL.png | Bin .../docs/zh_CN/_book/images/toolbar_SPARK.png | Bin .../docs/zh_CN/_book/images/toolbar_SQL.png | Bin .../_book/images/toolbar_SUB_PROCESS.png | Bin .../docs/zh_CN/_book/images/tree_view.png | Bin 
.../view/docs/zh_CN/_book/images/udf_edit.png | Bin .../docs/zh_CN/_book/images/user_manager.png | Bin .../view/docs/zh_CN/_book/images/useredit.png | Bin .../docs/zh_CN/_book/images/useredit2.png | Bin .../view/docs/zh_CN/_book/images/userinfo.png | Bin .../docs/zh_CN/_book/images/variable_view.png | Bin .../zh_CN/_book/images/variable_view2.png | Bin .../view/docs/zh_CN/_book/images/worker.png | Bin .../src/view/docs/zh_CN/_book/index.html | 0 .../zh_CN/_book/pages/deploy-background.html | 0 .../zh_CN/_book/pages/deploy-foreground.html | 0 .../docs/zh_CN/_book/pages/development.html | 0 .../zh_CN/_book/pages/guide-architecture.html | 0 .../docs/zh_CN/_book/pages/guide-manual.html | 0 .../view/docs/zh_CN/_book/search_index.json | 0 .../src/view/home/index.html | 0 .../src/view/login/index.html | 0 escheduler-alert/pom.xml | 129 -- .../src/main/assembly/package.xml | 40 - .../java/cn/escheduler/alert/AlertServer.java | 89 - .../alert/manager/EmailManager.java | 55 - .../manager/EnterpriseWeChatManager.java | 57 - .../escheduler/alert/manager/MsgManager.java | 36 - .../escheduler/alert/runner/AlertSender.java | 142 -- .../cn/escheduler/alert/utils/Constants.java | 157 -- .../alert/utils/EnterpriseWeChatUtils.java | 248 --- .../cn/escheduler/alert/utils/ExcelUtils.java | 131 -- .../cn/escheduler/alert/utils/FuncUtils.java | 35 - .../cn/escheduler/alert/utils/JSONUtils.java | 68 - .../cn/escheduler/alert/utils/MailUtils.java | 459 ----- .../escheduler/alert/utils/PropertyUtils.java | 193 -- .../src/main/resources/alert_logback.xml | 31 - .../utils/EnterpriseWeChatUtilsTest.java | 119 -- .../escheduler/alert/utils/MailUtilsTest.java | 231 --- escheduler-api/pom.xml | 219 --- escheduler-api/src/main/assembly/package.xml | 74 - .../escheduler/api/ApiApplicationServer.java | 37 - .../api/CombinedApplicationServer.java | 57 - .../api/configuration/AppConfiguration.java | 115 -- .../ServiceModelToSwagger2MapperImpl.java | 509 ----- .../api/configuration/SwaggerConfig.java | 
55 - .../api/controller/AccessTokenController.java | 186 -- .../api/controller/AlertGroupController.java | 253 --- .../api/controller/BaseController.java | 272 --- .../controller/DataAnalysisController.java | 200 -- .../api/controller/DataSourceController.java | 460 ----- .../api/controller/ExecutorController.java | 201 -- .../api/controller/LoggerController.java | 108 - .../api/controller/LoginController.java | 147 -- .../api/controller/MonitorController.java | 133 -- .../ProcessDefinitionController.java | 494 ----- .../controller/ProcessInstanceController.java | 367 ---- .../api/controller/ProjectController.java | 304 --- .../api/controller/QueueController.java | 202 -- .../api/controller/ResourcesController.java | 715 ------- .../api/controller/SchedulerController.java | 336 ---- .../controller/TaskInstanceController.java | 98 - .../api/controller/TaskRecordController.java | 114 -- .../api/controller/TenantController.java | 247 --- .../api/controller/UsersController.java | 456 ----- .../api/controller/WorkerGroupController.java | 171 -- .../escheduler/api/dto/CommandStateCount.java | 61 - .../cn/escheduler/api/dto/DefineUserDto.java | 55 - .../cn/escheduler/api/dto/ScheduleParam.java | 72 - .../cn/escheduler/api/dto/TaskCountDto.java | 135 -- .../cn/escheduler/api/dto/TaskStateCount.java | 50 - .../cn/escheduler/api/dto/gantt/GanttDto.java | 103 - .../cn/escheduler/api/dto/gantt/Task.java | 138 -- .../escheduler/api/dto/treeview/Instance.java | 171 -- .../api/dto/treeview/TreeViewDto.java | 84 - .../cn/escheduler/api/enums/ExecuteType.java | 40 - .../java/cn/escheduler/api/enums/Status.java | 266 --- .../api/interceptor/DruidStatFilter.java | 29 - .../api/interceptor/DruidStatViewServlet.java | 34 - .../interceptor/LoginHandlerInterceptor.java | 111 -- .../java/cn/escheduler/api/log/LogClient.java | 134 -- .../api/service/AccessTokenService.java | 180 -- .../api/service/AlertGroupService.java | 294 --- .../api/service/BaseDAGService.java | 89 - 
.../escheduler/api/service/BaseService.java | 130 -- .../api/service/DataAnalysisService.java | 416 ---- .../api/service/DataSourceService.java | 690 ------- .../api/service/ExecutorService.java | 540 ----- .../escheduler/api/service/LoggerService.java | 91 - .../api/service/MonitorService.java | 127 -- .../api/service/ProcessDefinitionService.java | 1123 ----------- .../api/service/ProcessInstanceService.java | 723 ------- .../api/service/ProjectService.java | 396 ---- .../escheduler/api/service/QueueService.java | 259 --- .../api/service/ResourcesService.java | 898 --------- .../api/service/SchedulerService.java | 594 ------ .../api/service/SessionService.java | 150 -- .../api/service/TaskInstanceService.java | 134 -- .../api/service/TaskRecordService.java | 85 - .../escheduler/api/service/TenantService.java | 300 --- .../api/service/UdfFuncService.java | 328 ---- .../escheduler/api/service/UsersService.java | 694 ------- .../api/service/WorkerGroupService.java | 157 -- .../cn/escheduler/api/utils/CheckUtils.java | 163 -- .../cn/escheduler/api/utils/Constants.java | 129 -- .../cn/escheduler/api/utils/FileUtils.java | 79 - .../cn/escheduler/api/utils/PageInfo.java | 117 -- .../java/cn/escheduler/api/utils/Result.java | 82 - .../escheduler/api/utils/ZooKeeperState.java | 211 -- .../api/utils/ZookeeperMonitor.java | 89 - .../src/main/resources/apiserver_logback.xml | 42 - .../src/main/resources/application.properties | 19 - .../src/main/resources/combined_logback.xml | 62 - .../cn/escheduler/api/HttpClientTest.java | 157 -- .../controller/AbstractControllerTest.java | 83 - .../DataAnalysisControllerTest.java | 97 - .../controller/DataSourceControllerTest.java | 81 - .../controller/ExecutorControllerTest.java | 74 - .../api/controller/LoggerControllerTest.java | 58 - .../api/controller/LoginControllerTest.java | 65 - .../api/controller/MonitorControllerTest.java | 82 - .../ProcessDefinitionControllerTest.java | 65 - .../ProcessInstanceControllerTest.java | 54 - 
.../api/controller/ProjectControllerTest.java | 60 - .../api/controller/QueueControllerTest.java | 133 -- .../controller/ResourcesControllerTest.java | 80 - .../controller/SchedulerControllerTest.java | 66 - .../TaskInstanceControllerTest.java | 65 - .../api/controller/TenantControllerTest.java | 54 - .../api/controller/UsersControllerTest.java | 53 - .../api/service/DataAnalysisServiceTest.java | 51 - .../api/service/DataSourceServiceTest.java | 53 - .../api/service/ExecutorServiceTest.java | 72 - .../api/service/LoggerServiceTest.java | 52 - .../service/ProcessDefinitionServiceTest.java | 90 - .../service/ProcessInstanceServiceTest.java | 95 - .../api/service/ResourcesServiceTest.java | 53 - .../api/service/SchedulerServiceTest.java | 58 - .../api/service/SessionServiceTest.java | 52 - .../api/service/TaskInstanceServiceTest.java | 62 - .../api/service/TenantServiceTest.java | 53 - .../api/service/UdfFuncServiceTest.java | 58 - .../api/service/UsersServiceTest.java | 54 - .../api/utils/ZookeeperMonitorUtilsTest.java | 30 - escheduler-common/pom.xml | 595 ------ .../java/cn/escheduler/common/Constants.java | 923 --------- .../java/cn/escheduler/common/IStoppable.java | 29 - .../escheduler/common/enums/AlertStatus.java | 27 - .../cn/escheduler/common/enums/AlertType.java | 27 - .../escheduler/common/enums/CommandType.java | 40 - .../cn/escheduler/common/enums/CycleEnum.java | 28 - .../cn/escheduler/common/enums/DataType.java | 35 - .../cn/escheduler/common/enums/DbType.java | 33 - .../escheduler/common/enums/DependResult.java | 31 - .../common/enums/DependStrategy.java | 29 - .../common/enums/DependentRelation.java | 25 - .../cn/escheduler/common/enums/Direct.java | 27 - .../common/enums/ExecutionStatus.java | 101 - .../common/enums/FailureStrategy.java | 30 - .../java/cn/escheduler/common/enums/Flag.java | 33 - .../common/enums/HttpCheckCondition.java | 30 - .../escheduler/common/enums/HttpMethod.java | 31 - .../common/enums/HttpParametersType.java | 29 - 
.../cn/escheduler/common/enums/Priority.java | 31 - .../escheduler/common/enums/ProgramType.java | 30 - .../escheduler/common/enums/ReleaseState.java | 40 - .../common/enums/ResUploadType.java | 29 - .../escheduler/common/enums/ResourceType.java | 27 - .../cn/escheduler/common/enums/RunMode.java | 28 - .../common/enums/SelfDependStrategy.java | 29 - .../escheduler/common/enums/ServerEnum.java | 29 - .../cn/escheduler/common/enums/ShowType.java | 34 - .../common/enums/TaskDependType.java | 30 - .../common/enums/TaskRecordStatus.java | 35 - .../common/enums/TaskStateType.java | 67 - .../common/enums/TaskTimeoutStrategy.java | 29 - .../cn/escheduler/common/enums/TaskType.java | 42 - .../cn/escheduler/common/enums/UdfType.java | 27 - .../cn/escheduler/common/enums/UserType.java | 28 - .../escheduler/common/enums/WarningType.java | 31 - .../escheduler/common/enums/ZKNodeType.java | 15 - .../java/cn/escheduler/common/graph/DAG.java | 519 ----- .../common/job/db/BaseDataSource.java | 112 -- .../common/job/db/ClickHouseDataSource.java | 75 - .../common/job/db/DataSourceFactory.java | 93 - .../common/job/db/HiveDataSource.java | 84 - .../common/job/db/MySQLDataSource.java | 73 - .../common/job/db/OracleDataSource.java | 75 - .../common/job/db/PostgreDataSource.java | 77 - .../common/job/db/SQLServerDataSource.java | 71 - .../common/job/db/SparkDataSource.java | 76 - .../escheduler/common/model/DateInterval.java | 62 - .../common/model/DependentItem.java | 80 - .../common/model/DependentTaskModel.java | 44 - .../escheduler/common/model/MasterServer.java | 130 -- .../cn/escheduler/common/model/TaskNode.java | 324 --- .../common/model/TaskNodeRelation.java | 67 - .../common/process/HttpProperty.java | 125 -- .../escheduler/common/process/ProcessDag.java | 85 - .../escheduler/common/process/Property.java | 143 -- .../common/process/ResourceInfo.java | 37 - .../escheduler/common/queue/ITaskQueue.java | 95 - .../common/queue/TaskQueueFactory.java | 54 - 
.../common/queue/TaskQueueZkImpl.java | 439 ----- .../common/shell/AbstractShell.java | 341 ---- .../common/shell/ShellExecutor.java | 175 -- .../common/task/AbstractParameters.java | 69 - .../escheduler/common/task/IParameters.java | 38 - .../common/task/TaskTimeoutParameter.java | 81 - .../task/dependent/DependentParameters.java | 58 - .../common/task/flink/FlinkParameters.java | 219 --- .../common/task/http/HttpParameters.java | 108 - .../common/task/mr/MapreduceParameters.java | 145 -- .../task/procedure/ProcedureParameters.java | 89 - .../common/task/python/PythonParameters.java | 67 - .../common/task/shell/ShellParameters.java | 70 - .../common/task/spark/SparkParameters.java | 220 --- .../escheduler/common/task/sql/SqlBinds.java | 42 - .../common/task/sql/SqlParameters.java | 213 -- .../escheduler/common/task/sql/SqlType.java | 27 - .../task/subprocess/SubProcessParameters.java | 48 - .../cn/escheduler/common/thread/Stopper.java | 39 - .../common/thread/ThreadPoolExecutors.java | 310 --- .../escheduler/common/thread/ThreadUtils.java | 202 -- .../cn/escheduler/common/utils/Bytes.java | 697 ------- .../common/utils/CollectionUtils.java | 292 --- .../escheduler/common/utils/CommonUtils.java | 94 - .../common/utils/ConnectionUtils.java | 104 - .../cn/escheduler/common/utils/DateUtils.java | 356 ---- .../common/utils/DependentUtils.java | 144 -- .../common/utils/EncryptionUtils.java | 37 - .../common/utils/EnumFieldUtil.java | 37 - .../cn/escheduler/common/utils/FileUtils.java | 428 ---- .../escheduler/common/utils/HadoopUtils.java | 585 ------ .../cn/escheduler/common/utils/HttpUtils.java | 100 - .../cn/escheduler/common/utils/IpUtils.java | 64 - .../cn/escheduler/common/utils/JSONUtils.java | 271 --- .../cn/escheduler/common/utils/OSUtils.java | 297 --- .../common/utils/ParameterUtils.java | 176 -- .../common/utils/PropertyUtils.java | 215 -- .../cn/escheduler/common/utils/ResInfo.java | 154 -- .../escheduler/common/utils/SchemaUtils.java | 150 -- 
.../escheduler/common/utils/ScriptRunner.java | 317 --- .../common/utils/TaskParametersUtils.java | 80 - .../utils/dependent/DependentDateUtils.java | 169 -- .../utils/placeholder/BusinessTimeUtils.java | 72 - .../utils/placeholder/PlaceholderUtils.java | 99 - .../PropertyPlaceholderHelper.java | 254 --- .../placeholder/TimePlaceholderUtils.java | 512 ----- .../common/zk/AbstractZKClient.java | 583 ------ .../cn/escheduler/common/graph/DAGTest.java | 356 ---- .../cn/escheduler/common/os/OSUtilsTest.java | 146 -- .../cn/escheduler/common/os/OshiTest.java | 112 -- .../common/queue/TaskQueueImplTest.java | 112 -- .../common/shell/ShellExecutorTest.java | 76 - .../threadutils/ThreadPoolExecutorsTest.java | 53 - .../common/utils/CollectionUtilsTest.java | 63 - .../common/utils/CommonUtilsTest.java | 61 - .../common/utils/DateUtilsTest.java | 57 - .../common/utils/DependentUtilsTest.java | 200 -- .../common/utils/FileUtilsTest.java | 28 - .../common/utils/HadoopUtilsTest.java | 90 - .../common/utils/HttpUtilsTest.java | 47 - .../escheduler/common/utils/IpUtilsTest.java | 41 - .../common/utils/JSONUtilsTest.java | 158 -- .../common/utils/PropertyUtilsTest.java | 31 - .../escheduler/common/utils/StringTest.java | 60 - .../placeholder/TimePlaceholderUtilsTest.java | 70 - .../common/zk/StandaloneZKServerForTest.java | 100 - escheduler-dao/pom.xml | 192 -- .../cn/escheduler/dao/AbstractBaseDao.java | 27 - .../main/java/cn/escheduler/dao/AlertDao.java | 172 -- .../src/main/java/cn/escheduler/dao/App.java | 11 - .../java/cn/escheduler/dao/DaoFactory.java | 64 - .../java/cn/escheduler/dao/MonitorDBDao.java | 145 -- .../java/cn/escheduler/dao/ProcessDao.java | 1747 ----------------- .../java/cn/escheduler/dao/TaskRecordDao.java | 299 --- .../dao/config/MybatisPlusConfig.java | 17 - .../cn/escheduler/dao/config/YmlConfig.java | 59 - .../dao/datasource/ConnectionFactory.java | 99 - .../cn/escheduler/dao/entity/AccessToken.java | 108 - .../java/cn/escheduler/dao/entity/Alert.java | 
236 --- .../cn/escheduler/dao/entity/AlertGroup.java | 111 -- .../cn/escheduler/dao/entity/Command.java | 288 --- .../escheduler/dao/entity/CommandCount.java | 58 - .../dao/entity/CycleDependency.java | 93 - .../cn/escheduler/dao/entity/DataSource.java | 196 -- .../escheduler/dao/entity/DatasourceUser.java | 121 -- .../dao/entity/DefinitionGroupByUser.java | 62 - .../cn/escheduler/dao/entity/Dependency.java | 62 - .../escheduler/dao/entity/ErrorCommand.java | 297 --- .../dao/entity/ExecuteStatusCount.java | 61 - .../escheduler/dao/entity/MonitorRecord.java | 115 -- .../cn/escheduler/dao/entity/ProcessData.java | 105 - .../dao/entity/ProcessDefinition.java | 384 ---- .../dao/entity/ProcessInstance.java | 620 ------ .../dao/entity/ProcessInstanceMap.java | 93 - .../cn/escheduler/dao/entity/Project.java | 211 -- .../cn/escheduler/dao/entity/ProjectUser.java | 141 -- .../java/cn/escheduler/dao/entity/Queue.java | 106 - .../cn/escheduler/dao/entity/Resource.java | 217 -- .../escheduler/dao/entity/ResourcesUser.java | 123 -- .../cn/escheduler/dao/entity/Schedule.java | 307 --- .../cn/escheduler/dao/entity/Session.java | 95 - .../escheduler/dao/entity/TaskInstance.java | 515 ----- .../cn/escheduler/dao/entity/TaskRecord.java | 256 --- .../java/cn/escheduler/dao/entity/Tenant.java | 167 -- .../cn/escheduler/dao/entity/UDFUser.java | 123 -- .../cn/escheduler/dao/entity/UdfFunc.java | 232 --- .../java/cn/escheduler/dao/entity/User.java | 273 --- .../escheduler/dao/entity/UserAlertGroup.java | 140 -- .../cn/escheduler/dao/entity/WorkerGroup.java | 96 - .../escheduler/dao/entity/WorkerServer.java | 127 -- .../dao/entity/ZookeeperRecord.java | 217 -- .../dao/mapper/AccessTokenMapper.java | 38 - .../dao/mapper/AlertGroupMapper.java | 45 - .../cn/escheduler/dao/mapper/AlertMapper.java | 31 - .../escheduler/dao/mapper/CommandMapper.java | 47 - .../dao/mapper/DataSourceMapper.java | 44 - .../dao/mapper/DataSourceUserMapper.java | 31 - .../dao/mapper/ErrorCommandMapper.java | 34 - 
.../dao/mapper/ProcessDefinitionMapper.java | 48 - .../dao/mapper/ProcessInstanceMapMapper.java | 38 - .../dao/mapper/ProcessInstanceMapper.java | 74 - .../escheduler/dao/mapper/ProjectMapper.java | 46 - .../dao/mapper/ProjectUserMapper.java | 30 - .../cn/escheduler/dao/mapper/QueueMapper.java | 37 - .../escheduler/dao/mapper/ResourceMapper.java | 72 - .../dao/mapper/ResourceUserMapper.java | 28 - .../escheduler/dao/mapper/ScheduleMapper.java | 39 - .../escheduler/dao/mapper/SessionMapper.java | 29 - .../dao/mapper/TaskInstanceMapper.java | 72 - .../escheduler/dao/mapper/TenantMapper.java | 34 - .../escheduler/dao/mapper/UDFUserMapper.java | 30 - .../escheduler/dao/mapper/UdfFuncMapper.java | 44 - .../dao/mapper/UserAlertGroupMapper.java | 34 - .../cn/escheduler/dao/mapper/UserMapper.java | 48 - .../dao/mapper/WorkerGroupMapper.java | 36 - .../dao/upgrade/DolphinSchedulerManager.java | 114 -- .../dao/upgrade/MysqlUpgradeDao.java | 101 - .../dao/upgrade/PostgresqlUpgradeDao.java | 133 -- .../cn/escheduler/dao/upgrade/UpgradeDao.java | 359 ---- .../upgrade/shell/CreateDolphinScheduler.java | 45 - .../upgrade/shell/InitDolphinScheduler.java | 38 - .../shell/UpgradeDolphinScheduler.java | 43 - .../cn/escheduler/dao/utils/BeanContext.java | 51 - .../cn/escheduler/dao/utils/DagHelper.java | 330 ---- .../escheduler/dao/utils/PropertyUtils.java | 192 -- .../dao/utils/cron/AbstractCycle.java | 169 -- .../escheduler/dao/utils/cron/CronUtils.java | 207 -- .../dao/utils/cron/CycleFactory.java | 211 -- .../escheduler/dao/utils/cron/CycleLinks.java | 64 - .../AccessTokenMapper.xml | 16 - .../AlertGroupMapper.xml | 30 - .../cn.escheduler.dao.mapper/AlertMapper.xml | 9 - .../CommandMapper.xml | 26 - .../DataSourceMapper.xml | 62 - .../DataSourceUserMapper.xml | 13 - .../ErrorCommandMapper.xml | 19 - .../ProcessDefinitionMapper.xml | 56 - .../ProcessInstanceMapMapper.xml | 26 - .../ProcessInstanceMapper.xml | 123 -- .../ProjectMapper.xml | 65 - .../ProjectUserMapper.xml | 19 - 
.../cn.escheduler.dao.mapper/QueueMapper.xml | 25 - .../ResourceMapper.xml | 57 - .../ResourceUserMapper.xml | 15 - .../ScheduleMapper.xml | 41 - .../SessionMapper.xml | 9 - .../TaskInstanceMapper.xml | 96 - .../cn.escheduler.dao.mapper/TenantMapper.xml | 24 - .../UDFUserMapper.xml | 12 - .../UdfFuncMapper.xml | 51 - .../UserAlertGroupMapper.xml | 20 - .../cn.escheduler.dao.mapper/UserMapper.xml | 55 - .../WorkerGroupMapper.xml | 23 - .../cn/escheduler/dao/cron/CronUtilsTest.java | 181 -- .../dao/mapper/AccessTokenMapperTest.java | 94 - .../dao/mapper/AlertGroupMapperTest.java | 136 -- .../dao/mapper/AlertMapperTest.java | 89 - .../dao/mapper/CommandMapperTest.java | 153 -- .../dao/mapper/DataSourceMapperTest.java | 150 -- .../dao/mapper/DataSourceUserMapperTest.java | 91 - .../dao/mapper/ErrorCommandMapperTest.java | 113 -- .../mapper/ProcessDefinitionMapperTest.java | 199 -- .../mapper/ProcessInstanceMapMapperTest.java | 133 -- .../dao/mapper/ProcessInstanceMapperTest.java | 287 --- .../dao/mapper/ProjectMapperTest.java | 56 - .../dao/mapper/ProjectUserMapperTest.java | 95 - .../dao/mapper/QueueMapperTest.java | 108 - .../dao/mapper/ResourceMapperTest.java | 52 - .../dao/mapper/ResourceUserMapperTest.java | 87 - .../dao/mapper/ScheduleMapperTest.java | 193 -- .../dao/mapper/SessionMapperTest.java | 82 - .../dao/mapper/TaskInstanceMapperTest.java | 64 - .../dao/mapper/TenantMapperTest.java | 125 -- .../dao/mapper/UDFUserMapperTest.java | 36 - .../dao/mapper/UdfFuncMapperTest.java | 48 - .../dao/mapper/UserAlertGroupMapperTest.java | 40 - .../escheduler/dao/mapper/UserMapperTest.java | 326 --- .../dao/mapper/WorkerGroupMapperTest.java | 113 -- escheduler-rpc/pom.xml | 121 -- .../cn/escheduler/rpc/LogViewServiceGrpc.java | 499 ----- escheduler-rpc/src/main/proto/scheduler.proto | 83 - escheduler-server/pom.xml | 127 -- .../src/main/assembly/package.xml | 74 - .../server/master/AbstractServer.java | 85 - .../server/master/MasterServer.java | 277 --- 
.../server/master/log/MasterLogFilter.java | 42 - .../runner/MasterBaseTaskExecThread.java | 132 -- .../master/runner/MasterExecThread.java | 1042 ---------- .../master/runner/MasterSchedulerThread.java | 108 - .../master/runner/MasterTaskExecThread.java | 165 -- .../runner/SubProcessTaskExecThread.java | 178 -- .../quartz/DruidConnectionProvider.java | 203 -- .../server/quartz/ProcessScheduleJob.java | 138 -- .../server/quartz/QuartzExecutors.java | 311 --- .../cn/escheduler/server/rpc/LogClient.java | 122 -- .../escheduler/server/rpc/LoggerServer.java | 219 --- .../escheduler/server/utils/AlertManager.java | 240 --- .../server/utils/FlinkArgsUtils.java | 110 -- .../escheduler/server/utils/LoggerUtils.java | 87 - .../escheduler/server/utils/ParamUtils.java | 102 - .../escheduler/server/utils/ProcessUtils.java | 320 --- .../server/utils/SparkArgsUtils.java | 117 -- .../cn/escheduler/server/utils/UDFUtils.java | 109 - .../server/worker/WorkerServer.java | 335 ---- .../worker/log/TaskLogDiscriminator.java | 68 - .../server/worker/log/TaskLogFilter.java | 36 - .../server/worker/log/WorkerLogFilter.java | 40 - .../server/worker/runner/FetchTaskThread.java | 324 --- .../worker/runner/TaskScheduleThread.java | 328 ---- .../worker/task/AbstractCommandExecutor.java | 569 ------ .../server/worker/task/AbstractTask.java | 213 -- .../server/worker/task/AbstractYarnTask.java | 92 - .../worker/task/PythonCommandExecutor.java | 158 -- .../worker/task/ShellCommandExecutor.java | 96 - .../server/worker/task/TaskManager.java | 73 - .../server/worker/task/TaskProps.java | 290 --- .../task/dependent/DependentExecute.java | 211 -- .../worker/task/dependent/DependentTask.java | 172 -- .../server/worker/task/flink/FlinkTask.java | 118 -- .../server/worker/task/http/HttpTask.java | 270 --- .../server/worker/task/mr/MapReduceTask.java | 144 -- .../task/processdure/ProcedureTask.java | 346 ---- .../server/worker/task/python/PythonTask.java | 140 -- 
.../server/worker/task/shell/ShellTask.java | 160 -- .../server/worker/task/spark/SparkTask.java | 109 - .../server/worker/task/sql/SqlTask.java | 447 ----- .../escheduler/server/zk/ZKMasterClient.java | 411 ---- .../escheduler/server/zk/ZKWorkerClient.java | 166 -- .../src/main/resources/master_logback.xml | 34 - .../src/main/resources/worker_logback.xml | 61 - .../server/master/AlertManagerTest.java | 107 - .../server/master/MasterCommandTest.java | 140 -- .../escheduler/server/master/ParamsTest.java | 102 - .../escheduler/server/worker/EnvFileTest.java | 64 - .../shell/ShellCommandExecutorTest.java | 103 - .../server/worker/sql/SqlExecutorTest.java | 146 -- .../task/dependent/DependentTaskTest.java | 63 - .../server/zk/StandaloneZKServerForTest.java | 102 - .../server/zk/ZKWorkerClientTest.java | 37 - escheduler-ui/install-escheduler-ui.sh | 219 --- package.xml | 10 +- pom.xml | 42 +- ...cheduler.sh => create-dolphinscheduler.sh} | 0 ...r-daemon.sh => dolphinscheduler-daemon.sh} | 0 ...heduler.sh => upgrade-dolphinscheduler.sh} | 0 1625 files changed, 65891 insertions(+), 65963 deletions(-) create mode 100644 dolphinscheduler-alert/pom.xml create mode 100644 dolphinscheduler-alert/src/main/assembly/package.xml create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EmailManager.java create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EnterpriseWeChatManager.java create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/MsgManager.java create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/runner/AlertSender.java create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java create mode 100644 
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/ExcelUtils.java create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/FuncUtils.java create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java create mode 100644 dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/PropertyUtils.java rename {escheduler-alert => dolphinscheduler-alert}/src/main/resources/alert.properties (100%) create mode 100644 dolphinscheduler-alert/src/main/resources/alert_logback.xml rename {escheduler-alert => dolphinscheduler-alert}/src/main/resources/application_alert.properties (100%) rename {escheduler-alert => dolphinscheduler-alert}/src/main/resources/mail_templates/alert_mail_template.ftl (100%) create mode 100644 dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java create mode 100644 dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/MailUtilsTest.java create mode 100644 dolphinscheduler-api/pom.xml create mode 100644 dolphinscheduler-api/src/main/assembly/package.xml create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/CombinedApplicationServer.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/ServiceModelToSwagger2MapperImpl.java create mode 100644 
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/SwaggerConfig.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/BaseController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java create mode 100644 
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/CommandStateCount.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DefineUserDto.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ScheduleParam.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskStateCount.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/gantt/GanttDto.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/gantt/Task.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/treeview/Instance.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/treeview/TreeViewDto.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/ExecuteType.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/DruidStatFilter.java create mode 100644 
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/DruidStatViewServlet.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/log/LogClient.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java create mode 100644 
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/CheckUtils.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Constants.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZooKeeperState.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitor.java create mode 100644 dolphinscheduler-api/src/main/resources/apiserver_logback.xml create mode 100644 dolphinscheduler-api/src/main/resources/application.properties create mode 100644 dolphinscheduler-api/src/main/resources/combined_logback.xml rename {escheduler-api => dolphinscheduler-api}/src/main/resources/i18n/messages.properties 
(100%) rename {escheduler-api => dolphinscheduler-api}/src/main/resources/i18n/messages_en_US.properties (100%) rename {escheduler-api => dolphinscheduler-api}/src/main/resources/i18n/messages_zh_CN.properties (100%) create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/HttpClientTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoggerControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/QueueControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java create mode 100644 
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java create mode 100644 
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitorUtilsTest.java rename {escheduler-api => dolphinscheduler-api}/src/test/resources/dao/data_source.properties (100%) create mode 100644 dolphinscheduler-common/pom.xml create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/IStoppable.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertStatus.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CycleEnum.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DataType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DependResult.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DependStrategy.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DependentRelation.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Direct.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/FailureStrategy.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Flag.java create mode 100644 
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/HttpCheckCondition.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/HttpMethod.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/HttpParametersType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Priority.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ProgramType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ReleaseState.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ResUploadType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ResourceType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/RunMode.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/SelfDependStrategy.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ServerEnum.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ShowType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskDependType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskRecordStatus.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskTimeoutStrategy.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java create mode 100644 
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UserType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/WarningType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ZKNodeType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/BaseDataSource.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/ClickHouseDataSource.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/DataSourceFactory.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/HiveDataSource.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/MySQLDataSource.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/OracleDataSource.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/PostgreDataSource.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SQLServerDataSource.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SparkDataSource.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DateInterval.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentTaskModel.java create mode 
100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/MasterServer.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNodeRelation.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/HttpProperty.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ProcessDag.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/Property.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/ITaskQueue.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueFactory.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueZkImpl.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/ShellExecutor.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/IParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/TaskTimeoutParameter.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java create mode 100644 
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/http/HttpParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/procedure/ProcedureParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/python/PythonParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/shell/ShellParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlBinds.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlType.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/Stopper.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadPoolExecutors.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Bytes.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java create mode 100644 
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DependentUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EncryptionUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EnumFieldUtil.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SchemaUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java create mode 100644 
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/dependent/DependentDateUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/BusinessTimeUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PropertyPlaceholderHelper.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/AbstractZKClient.java rename {escheduler-common => dolphinscheduler-common}/src/main/resources/common/common.properties (100%) rename {escheduler-common => dolphinscheduler-common}/src/main/resources/common/hadoop/hadoop.properties (100%) rename {escheduler-common => dolphinscheduler-common}/src/main/resources/quartz.properties (100%) rename {escheduler-common => dolphinscheduler-common}/src/main/resources/zookeeper.properties (100%) create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/graph/DAGTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OshiTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/queue/TaskQueueImplTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/shell/ShellExecutorTest.java create mode 100644 
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DateUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PropertyUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/StandaloneZKServerForTest.java rename {escheduler-common => dolphinscheduler-common}/src/test/resources/dao/data_source.properties (100%) create mode 100644 dolphinscheduler-dao/pom.xml create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AbstractBaseDao.java create mode 100644 
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/App.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoFactory.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/ProcessDao.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/config/MybatisPlusConfig.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/config/YmlConfig.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java rename {escheduler-dao/src/main/java/cn/escheduler => dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler}/dao/datasource/DatabaseConfiguration.java (100%) create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AccessToken.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Alert.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AlertGroup.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/CommandCount.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/CycleDependency.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DataSource.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DatasourceUser.java create mode 100644 
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DefinitionGroupByUser.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Dependency.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ExecuteStatusCount.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/MonitorRecord.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessData.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstanceMap.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Project.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProjectUser.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Queue.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Resource.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ResourcesUser.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Schedule.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Session.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskRecord.java create mode 100644 
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Tenant.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UDFUser.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/User.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UserAlertGroup.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerGroup.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerServer.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ZookeeperRecord.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.java create mode 100644 
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/QueueMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/SessionMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TenantMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UserMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java create mode 100644 
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinScheduler.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinScheduler.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/BeanContext.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PropertyUtils.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/AbstractCycle.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CronUtils.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleFactory.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleLinks.java rename {escheduler-dao => dolphinscheduler-dao}/src/main/resources/application.yml (100%) create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AccessTokenMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertGroupMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/CommandMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceMapper.xml create mode 
100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceUserMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ErrorCommandMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessDefinitionMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectUserMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/QueueMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceUserMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ScheduleMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/SessionMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TaskInstanceMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TenantMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UDFUserMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UdfFuncMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserAlertGroupMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/WorkerGroupMapper.xml rename 
{escheduler-dao => dolphinscheduler-dao}/src/main/resources/dao/data_source.properties__ (100%) create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/cron/CronUtilsTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/QueueMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java create mode 100644 
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/SessionMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TenantMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java create mode 100644 dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapperTest.java rename {escheduler-dao => dolphinscheduler-dao}/src/test/resources/dao/data_source.properties (100%) create mode 100644 dolphinscheduler-rpc/pom.xml create mode 100644 dolphinscheduler-rpc/src/main/java/org/apache/dolphinscheduler/rpc/LogViewServiceGrpc.java create mode 100644 dolphinscheduler-rpc/src/main/proto/scheduler.proto create mode 100644 dolphinscheduler-server/pom.xml create mode 100644 dolphinscheduler-server/src/main/assembly/package.xml create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/AbstractServer.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/log/MasterLogFilter.java create mode 100644 
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/DruidConnectionProvider.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/ProcessScheduleJob.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/QuartzExecutors.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LogClient.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LoggerServer.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/LoggerUtils.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/SparkArgsUtils.java create mode 100644 
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/log/TaskLogDiscriminator.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/log/TaskLogFilter.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/log/WorkerLogFilter.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/ShellCommandExecutor.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskProps.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java create 
mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java create mode 100644 dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java rename {escheduler-server => dolphinscheduler-server}/src/main/resources/application_master.properties (100%) rename {escheduler-server => dolphinscheduler-server}/src/main/resources/application_worker.properties (100%) rename {escheduler-server => dolphinscheduler-server}/src/main/resources/master.properties (100%) create mode 100644 dolphinscheduler-server/src/main/resources/master_logback.xml rename {escheduler-server => dolphinscheduler-server}/src/main/resources/worker.properties (100%) create mode 100644 dolphinscheduler-server/src/main/resources/worker_logback.xml create mode 100644 dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/AlertManagerTest.java create mode 100644 
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterCommandTest.java create mode 100644 dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ParamsTest.java create mode 100644 dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/EnvFileTest.java create mode 100644 dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java create mode 100644 dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java create mode 100644 dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTaskTest.java create mode 100644 dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/StandaloneZKServerForTest.java create mode 100644 dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClientTest.java rename {escheduler-server => dolphinscheduler-server}/src/test/resources/dao/data_source.properties (100%) rename {escheduler-ui => dolphinscheduler-ui}/.babelrc (100%) rename {escheduler-ui => dolphinscheduler-ui}/.editorconfig (100%) rename {escheduler-ui => dolphinscheduler-ui}/.env (100%) rename {escheduler-ui => dolphinscheduler-ui}/.eslintrc (100%) rename {escheduler-ui => dolphinscheduler-ui}/build/combo.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/build/config.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/build/webpack.config.combined.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/build/webpack.config.dev.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/build/webpack.config.prod.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/build/webpack.config.test.js (100%) create mode 100644 dolphinscheduler-ui/install-escheduler-ui.sh rename {escheduler-ui => dolphinscheduler-ui}/package.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/combo/1.0.0/3rd.css 
(100%) rename {escheduler-ui => dolphinscheduler-ui}/src/combo/1.0.0/3rd.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/combo/1.0.0/base.css (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/combo/1.0.0/es5.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/combo/1.0.0/local.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/awesome/FontAwesome.otf (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/awesome/font-awesome.css (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/awesome/fontawesome-webfont.eot (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/awesome/fontawesome-webfont.svg (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/awesome/fontawesome-webfont.ttf (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/awesome/fontawesome-webfont.woff (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/awesome/fontawesome-webfont.woff2 (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/demo.css (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/demo_index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/iconfont.css (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/iconfont.eot (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/iconfont.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/iconfont.svg (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/iconfont.ttf (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/iconfont.woff (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/font/iconfont.woff2 (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/images/favicon.ico (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/App.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/config.js (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/dag.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/dag.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/dag.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/_source/selectInput.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/_source/timeoutAlarm.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/_source/workerGroups.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/formModel.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/formModel.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/log.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/commcon.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/dependItemList.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/httpParams.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/listBox.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/localParams.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/resources.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/sqlType.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/statementList.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/udfs.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/dependent.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/flink.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/http.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/procedure.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/sql.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/formModel/tasks/sub_process.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/jumpAffirm/jumpAffirm.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/plugIn/downChart.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/plugIn/dragZoom.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/plugIn/util.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/startingParam/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/udp/_source/selectTenant.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/udp/udp.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/variable/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/_source/variable/variablesView.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/definitionDetails.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/dag_bg.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/toobar_HTTP.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/toobar_flink.svg (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/toolbar_DEPENDENT.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/toolbar_MR.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/toolbar_PROCEDURE.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/toolbar_PYTHON.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/toolbar_SHELL.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/toolbar_SPARK.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/toolbar_SQL.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/img/toolbar_SUB_PROCESS.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/dag/index.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/pages/dag/instanceDetails.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/datasource/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/datasource/pages/list/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/datasource/pages/list/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/home/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/_source/gauge.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/_source/gaugeOption.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/_source/zookeeperList.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/alert.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/apiserver.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/master.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/mysql.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/rpcserver.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/servers.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/statistics.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/worker.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/pages/monitor/pages/servers/zookeeper.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/_source/instanceConditions/common.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/_source/taskRecordList/_source/conditions.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/_source/taskRecordList/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/_source/taskRecordList/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/create/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/details/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/email.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/start.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/timing.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/util.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/list/index.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/tree/_source/tree.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/tree/_source/util.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/tree/img/dag_bg.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/pages/tree/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/timing/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/definition/timing/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/historyTaskRecord/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/index/_source/chartConfig.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/index/_source/commandStateCount.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/index/_source/defineUserCount.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/index/_source/processStateCount.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/index/_source/queueCount.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/index/_source/taskCtatusCount.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/index/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/instance/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/instance/pages/details/index.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/instance/pages/gantt/_source/gantt.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/instance/pages/gantt/img/dag_bg.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/instance/pages/gantt/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/instance/pages/list/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/list/_source/createProject.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/list/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/list/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/taskInstance/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/taskRecord/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/projects/pages/timing/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/_source/codemirror.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/_source/common.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/create/index.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/details/_source/down_error.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/details/_source/noType.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/details/_source/utils.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/details/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/edit/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/list/_source/rename.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/file/pages/list/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/udf/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/createUdf.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/udf/pages/function/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/rename.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/resource/pages/udf/pages/resource/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/index.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/queue/_source/createQueue.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/queue/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/queue/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/tenement/_source/createTenement.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/tenement/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/tenement/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/token/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/users/_source/createUser.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/users/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/users/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/warningGroups/_source/createWarning.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/warningGroups/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/warningGroups/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/workerGroups/_source/createWorker.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/workerGroups/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/security/pages/workerGroups/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/user/index.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/pages/user/pages/account/_source/info.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/user/pages/account/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/user/pages/password/_source/info.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/user/pages/password/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/user/pages/token/_source/createToken.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/user/pages/token/_source/list.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/pages/user/pages/token/index.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/router/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/dag/actions.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/dag/getters.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/dag/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/dag/mutations.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/dag/state.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/datasource/actions.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/datasource/getters.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/datasource/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/datasource/mutations.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/datasource/state.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/monitor/actions.js (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/store/monitor/getters.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/monitor/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/monitor/mutations.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/monitor/state.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/projects/actions.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/projects/getters.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/projects/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/projects/mutations.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/projects/state.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/resource/actions.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/resource/getters.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/resource/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/resource/mutations.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/resource/state.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/security/actions.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/security/getters.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/security/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/security/mutations.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/security/state.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/user/actions.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/user/getters.js (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/conf/home/store/user/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/user/mutations.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/home/store/user/state.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/login/App.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/login/img/login-logo.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/conf/login/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/conditions/conditions.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/fileUpdate/definitionUpdate.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/fileUpdate/fileUpdate.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/fileUpdate/udfUpdate.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/layout/layout.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/listBoxF/listBoxF.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/listConstruction/listConstruction.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/nav/logo.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/nav/m_logo.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/nav/nav.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/noData/images/errorTip.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/noData/noData.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/popup/popup.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/priority/priority.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/progressBar/progressBar.vue 
(100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/secondaryMenu/_source/close.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/secondaryMenu/_source/menu.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/secondaryMenu/_source/open.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/secondaryMenu/secondaryMenu.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/spin/spin.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/tooltipsJSON/tooltipsJSON.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/components/transfer/transfer.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/download/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/echarts/themeData.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/filter/filter.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/filter/formatDate.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/i18n/config.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/i18n/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/i18n/locale/en_US.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/i18n/locale/zh_CN.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/io/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/mixin/disabledState.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/mixin/listUrlParamHandle.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/permissions/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/util/clickoutside.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/util/cookie.js (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/js/module/util/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/util/localStorage.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/util/routerUtil.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/js/module/util/util.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/build/config.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/build/webpack.config.prod.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/dist/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/dist/index.js.map (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/mock/data.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/mock/theme.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/packages/bar.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/packages/funnel.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/packages/line.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/packages/pie.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/packages/radar.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/packages/scatter.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/router/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/example/styles/main.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/package.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ana-charts/postcss.config.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/lib/ans-ui.min.css (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/lib/ans-ui.min.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/lib/font/iconfont.eot (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/lib/font/iconfont.svg (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/lib/font/iconfont.ttf (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/lib/font/iconfont.woff (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/lib/locale/en.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/lib/locale/en.js.map (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/lib/locale/zh-CN.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/lib/locale/zh-CN.js.map (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/package.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/example/index.js (100%) rename 
{escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/Box.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/BoxManager.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/message/message.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/modal/modal.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/notice/notice.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-button/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-button/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-button/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-button/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-button/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-button/src/source/Button.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-button/src/source/ButtonGroup.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-cascader/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-cascader/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-cascader/example/data.js 
(100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-cascader/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-cascader/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-cascader/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-cascader/src/source/Cascader.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-cascader/src/source/Caspanel.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-checkbox/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/source/Checkbox.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/source/CheckboxGroup.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/index.js (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/confirm.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/day.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/time.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/years.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/datepicker.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/date.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/daterange.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/month.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/time.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/year.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/date.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/isType.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/isValid.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/ishms.js (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/todate.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-drawer/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-drawer/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-drawer/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-drawer/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-drawer/example/test.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-drawer/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-drawer/src/source/drawer.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-form/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-form/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-form/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-form/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-form/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-form/src/source/Form.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-form/src/source/FormItem.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-input/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-input/example/app.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-input/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-input/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-input/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-input/src/source/Input.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-input/src/source/util/calcTextareaHeight.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-pagination/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-pagination/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-pagination/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-pagination/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-pagination/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-pagination/src/source/Page.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-poptip/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-poptip/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-poptip/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-poptip/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-poptip/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-poptip/src/source/Poptip.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-poptip/src/source/directive.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-progress/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-progress/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-progress/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-progress/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-progress/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-progress/src/source/Progress.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-radio/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-radio/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-radio/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-radio/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-radio/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-radio/src/source/Radio.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-radio/src/source/RadioGroup.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-scroller/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-scroller/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-scroller/example/index.html (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-scroller/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-scroller/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/HorizontalScrollbar.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/Scroller.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/VerticalScrollbar.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/example/async.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/example/dynamic.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/example/navigation.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/src/source/Option.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/src/source/OptionGroup.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/src/source/Select.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-select/src/source/SelectDropdown.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-spin/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-spin/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-spin/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-spin/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-spin/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/Spin.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/directive.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/service.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-switch/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-switch/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-switch/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-switch/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-switch/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-switch/src/source/Switch.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/example/app.vue (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/example/array.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/example/dynamic.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/example/indexs.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/example/paging.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/example/restrict.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/example/sort.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/example/tree.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/source/Table.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableBody.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableColumn.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableHeader.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableTd.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableTh.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/source/cellRenderer.js (100%) rename {escheduler-ui 
=> dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/source/layout.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/source/layoutObserver.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-table/src/source/store.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-timepicker/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-timepicker/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-timepicker/src/source/Timepicker.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-tooltip/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/Tooltip.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/directive.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/factory.js (100%) rename 
{escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/locale/format.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/locale/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/locale/lang/en.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/locale/lang/zh-CN.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/attentionSeekers.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/bouncingEntrances.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/bouncingExits.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/fadingEntrances.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/fadingExits.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/flippers.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/index.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/lightspeed.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/rotatingEntrances.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/rotatingExits.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/slidingEntrances.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/slidingExits.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/specials.scss (100%) 
rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/zoomEntrances.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/animation/zoomExits.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/common.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/box/box.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/box/message.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/box/modal.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/box/notice.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/button/button.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/button/mixin.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/cascader/cascader.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/checkbox/checkbox.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/datepicker/datepicker.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/drawer/drawer.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/form/form.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/index.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/input/input.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/pagination/pagination.scss (100%) rename {escheduler-ui 
=> dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/poptip/poptip.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/progress/progress.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/radio/radio.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/scroller/scroller.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/select/select.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/spin/spin.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/switch/switch.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/table/table.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/components/tooltip/tooltip.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/font.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/font/iconfont.eot (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/font/iconfont.svg (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/font/iconfont.ttf (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/font/iconfont.woff (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/index.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/style/vars.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/assist.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/constants.js (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/customRenderer.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/directives/clickOutside.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/directives/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/directives/mousewheel.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/dom/animatedScroll.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/dom/class.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/dom/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/dom/limitedLoop.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/dom/scrollIntoView.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/dom/scrollbarWidth.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/dom/style.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/event.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/lang.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/mixins/emitter.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/mixins/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/mixins/locale.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@analysys/ans-ui/src/util/mixins/popper.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@fedor/io/README.md (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@fedor/io/dist/io.esm.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@fedor/io/dist/io.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@fedor/io/dist/io.min.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@fedor/io/package.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@fedor/progress-webpack-plugin/lib/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@fedor/progress-webpack-plugin/package.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/.circleci/config.yml (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/.prettierrc.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/CHANGELOG.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/compileStyle.d.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/compileStyle.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/compileTemplate.d.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/compileTemplate.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/index.d.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/parse.d.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/parse.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/stylePlugins/scoped.d.ts (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/stylePlugins/scoped.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/stylePlugins/trim.d.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/stylePlugins/trim.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/styleProcessors/index.d.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/styleProcessors/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/assetUrl.d.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/assetUrl.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/srcset.d.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/srcset.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/utils.d.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/utils.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/types.d.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/dist/types.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/compileStyle.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/compileTemplate.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/index.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/parse.ts 
(100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/stylePlugins/scoped.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/stylePlugins/trim.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/styleProcessors/index.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/assetUrl.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/srcset.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/utils.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/lib/types.ts (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/component-compiler-utils/package.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/README.md (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/build/config.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/build/webpack.config.prod.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/dist/index.css (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/dist/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/dist/index.js.map (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/example/app.vue (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/example/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/example/index.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/package.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/@vue/crontab/postcss.config.js (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/lib/external/config.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/lib/external/email.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/sass/common/_animation.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/sass/common/_font.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/sass/common/_mixin.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/sass/common/_normalize.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/sass/common/_scrollbar.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/sass/common/_table.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/sass/common/index.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/sass/conf/home/index.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/sass/conf/login/index.scss (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/common/meta.inc (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/common/outro.inc (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/gitbook/gitbook-plugin-livereload/plugin.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/gitbook/gitbook.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/gitbook/theme.js (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/addtenant.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/auth_project.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/auth_user.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/complement_data.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/definition_create.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/definition_edit.png (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/definition_list.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/dependent_1.jpg (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/dependent_2.jpg (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/dependent_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/dependent_edit2.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/dependent_edit3.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/dependent_edit4.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/file_create.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/file_detail.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/file_rename.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/file_upload.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/flink_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/gantt.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/global_parameter.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/hive_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/hive_edit2.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/local_parameter.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/login.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/logout.png (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/mail_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/master.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/mr_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/mr_java.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/mysql_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/postgresql_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/procedure_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/process_instance.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/process_instance_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/project_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/project_index.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/python_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/scheduler.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/scheduler2.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/shell_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/spark_datesource.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/spark_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/sql_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/start_from_current.png (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/start_from_current2.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/start_process.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/subprocess_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/task_history.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/task_list.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/task_log.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/task_log2.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/toolbar_DEPENDENT.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/toolbar_MR.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/toolbar_PROCEDURE.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/toolbar_PYTHON.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/toolbar_SHELL.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/toolbar_SPARK.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/toolbar_SQL.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/toolbar_SUB_PROCESS.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/tree_view.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/udf_edit.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/user_manager.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/useredit.png (100%) rename {escheduler-ui => 
dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/useredit2.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/userinfo.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/variable_view.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/variable_view2.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/images/worker.png (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/pages/deploy-background.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/pages/deploy-foreground.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/pages/development.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/pages/guide-architecture.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/pages/guide-manual.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/docs/zh_CN/_book/search_index.json (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/home/index.html (100%) rename {escheduler-ui => dolphinscheduler-ui}/src/view/login/index.html (100%) delete mode 100644 escheduler-alert/pom.xml delete mode 100644 escheduler-alert/src/main/assembly/package.xml delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/AlertServer.java delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/manager/EmailManager.java delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/manager/EnterpriseWeChatManager.java delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/manager/MsgManager.java delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/runner/AlertSender.java delete mode 100644 
escheduler-alert/src/main/java/cn/escheduler/alert/utils/Constants.java delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/utils/EnterpriseWeChatUtils.java delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/utils/ExcelUtils.java delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/utils/FuncUtils.java delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/utils/JSONUtils.java delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/utils/MailUtils.java delete mode 100644 escheduler-alert/src/main/java/cn/escheduler/alert/utils/PropertyUtils.java delete mode 100644 escheduler-alert/src/main/resources/alert_logback.xml delete mode 100644 escheduler-alert/src/test/java/cn/escheduler/alert/utils/EnterpriseWeChatUtilsTest.java delete mode 100644 escheduler-alert/src/test/java/cn/escheduler/alert/utils/MailUtilsTest.java delete mode 100644 escheduler-api/pom.xml delete mode 100644 escheduler-api/src/main/assembly/package.xml delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/ApiApplicationServer.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/CombinedApplicationServer.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/configuration/AppConfiguration.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/configuration/ServiceModelToSwagger2MapperImpl.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/configuration/SwaggerConfig.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/AccessTokenController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/AlertGroupController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/BaseController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/DataAnalysisController.java delete mode 100644 
escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/ExecutorController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/LoggerController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/LoginController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/MonitorController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessDefinitionController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessInstanceController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/ProjectController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/QueueController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/ResourcesController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/TaskInstanceController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/TaskRecordController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/TenantController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/UsersController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/controller/WorkerGroupController.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/dto/CommandStateCount.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/dto/DefineUserDto.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/dto/ScheduleParam.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/dto/TaskCountDto.java delete mode 100644 
escheduler-api/src/main/java/cn/escheduler/api/dto/TaskStateCount.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/dto/gantt/GanttDto.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/dto/gantt/Task.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/dto/treeview/Instance.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/dto/treeview/TreeViewDto.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/enums/ExecuteType.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/interceptor/DruidStatFilter.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/interceptor/DruidStatViewServlet.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/interceptor/LoginHandlerInterceptor.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/log/LogClient.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/AccessTokenService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/AlertGroupService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/BaseDAGService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/BaseService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/DataAnalysisService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/ExecutorService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/LoggerService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/MonitorService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java delete mode 100644 
escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/ProjectService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/QueueService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/ResourcesService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/SessionService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/TaskInstanceService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/TaskRecordService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/UdfFuncService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/UsersService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/service/WorkerGroupService.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/utils/CheckUtils.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/utils/Constants.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/utils/FileUtils.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/utils/PageInfo.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/utils/Result.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/utils/ZooKeeperState.java delete mode 100644 escheduler-api/src/main/java/cn/escheduler/api/utils/ZookeeperMonitor.java delete mode 100644 escheduler-api/src/main/resources/apiserver_logback.xml delete mode 100644 escheduler-api/src/main/resources/application.properties delete mode 100644 escheduler-api/src/main/resources/combined_logback.xml delete mode 100644 
escheduler-api/src/test/java/cn/escheduler/api/HttpClientTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/AbstractControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/DataAnalysisControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/DataSourceControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/ExecutorControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/LoggerControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/LoginControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/MonitorControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/ProcessDefinitionControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/ProcessInstanceControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/ProjectControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/QueueControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/ResourcesControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/SchedulerControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/TaskInstanceControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/TenantControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/controller/UsersControllerTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/DataAnalysisServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/DataSourceServiceTest.java delete mode 100644 
escheduler-api/src/test/java/cn/escheduler/api/service/ExecutorServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/LoggerServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/ProcessDefinitionServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/ProcessInstanceServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/ResourcesServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/SchedulerServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/SessionServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/TaskInstanceServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/TenantServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/UdfFuncServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/service/UsersServiceTest.java delete mode 100644 escheduler-api/src/test/java/cn/escheduler/api/utils/ZookeeperMonitorUtilsTest.java delete mode 100644 escheduler-common/pom.xml delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/Constants.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/IStoppable.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/AlertStatus.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/AlertType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/CommandType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/CycleEnum.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/DataType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/DbType.java delete mode 100644 
escheduler-common/src/main/java/cn/escheduler/common/enums/DependResult.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/DependStrategy.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/DependentRelation.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/Direct.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/ExecutionStatus.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/FailureStrategy.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/Flag.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/HttpCheckCondition.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/HttpMethod.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/HttpParametersType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/Priority.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/ProgramType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/ReleaseState.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/ResUploadType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/ResourceType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/RunMode.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/SelfDependStrategy.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/ServerEnum.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/ShowType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/TaskDependType.java delete mode 100644 
escheduler-common/src/main/java/cn/escheduler/common/enums/TaskRecordStatus.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/TaskStateType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/TaskTimeoutStrategy.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/TaskType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/UdfType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/UserType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/WarningType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/enums/ZKNodeType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/graph/DAG.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/job/db/BaseDataSource.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/job/db/ClickHouseDataSource.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/job/db/DataSourceFactory.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/job/db/HiveDataSource.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/job/db/MySQLDataSource.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/job/db/OracleDataSource.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/job/db/PostgreDataSource.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/job/db/SQLServerDataSource.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/job/db/SparkDataSource.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/model/DateInterval.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/model/DependentItem.java delete mode 100644 
escheduler-common/src/main/java/cn/escheduler/common/model/DependentTaskModel.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/model/MasterServer.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/model/TaskNode.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/model/TaskNodeRelation.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/process/HttpProperty.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/process/ProcessDag.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/process/Property.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/process/ResourceInfo.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/queue/ITaskQueue.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/queue/TaskQueueFactory.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/queue/TaskQueueZkImpl.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/shell/AbstractShell.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/shell/ShellExecutor.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/AbstractParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/IParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/TaskTimeoutParameter.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/dependent/DependentParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/flink/FlinkParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/http/HttpParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/mr/MapreduceParameters.java delete mode 100644 
escheduler-common/src/main/java/cn/escheduler/common/task/procedure/ProcedureParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/python/PythonParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/shell/ShellParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/spark/SparkParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlBinds.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlType.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/task/subprocess/SubProcessParameters.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/thread/Stopper.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/thread/ThreadPoolExecutors.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/thread/ThreadUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/Bytes.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/CollectionUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/CommonUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/ConnectionUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/DateUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/DependentUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/EncryptionUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/EnumFieldUtil.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/FileUtils.java delete mode 100644 
escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/HttpUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/IpUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/JSONUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/OSUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/ParameterUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/PropertyUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/ResInfo.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/SchemaUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/ScriptRunner.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/TaskParametersUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/dependent/DependentDateUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/BusinessTimeUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/PlaceholderUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/PropertyPlaceholderHelper.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/TimePlaceholderUtils.java delete mode 100644 escheduler-common/src/main/java/cn/escheduler/common/zk/AbstractZKClient.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/graph/DAGTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/os/OSUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/os/OshiTest.java delete mode 100644 
escheduler-common/src/test/java/cn/escheduler/common/queue/TaskQueueImplTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/shell/ShellExecutorTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/threadutils/ThreadPoolExecutorsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/CollectionUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/CommonUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/DateUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/DependentUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/FileUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/HadoopUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/HttpUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/IpUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/JSONUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/PropertyUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/StringTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java delete mode 100644 escheduler-common/src/test/java/cn/escheduler/common/zk/StandaloneZKServerForTest.java delete mode 100644 escheduler-dao/pom.xml delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/AbstractBaseDao.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/AlertDao.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/App.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/DaoFactory.java delete mode 100644 
escheduler-dao/src/main/java/cn/escheduler/dao/MonitorDBDao.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/TaskRecordDao.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/config/MybatisPlusConfig.java delete mode 100755 escheduler-dao/src/main/java/cn/escheduler/dao/config/YmlConfig.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/datasource/ConnectionFactory.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/AccessToken.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/Alert.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/AlertGroup.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/Command.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/CommandCount.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/CycleDependency.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/DataSource.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/DatasourceUser.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/DefinitionGroupByUser.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/Dependency.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/ErrorCommand.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/ExecuteStatusCount.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/MonitorRecord.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessData.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessDefinition.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstance.java delete mode 100644 
escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstanceMap.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/Project.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProjectUser.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/Queue.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/Resource.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/ResourcesUser.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/Schedule.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/Session.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/TaskInstance.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/TaskRecord.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/Tenant.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/UDFUser.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/UdfFunc.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/User.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/UserAlertGroup.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/WorkerGroup.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/WorkerServer.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/entity/ZookeeperRecord.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AccessTokenMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AlertGroupMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AlertMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/CommandMapper.java delete mode 100644 
escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceUserMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ErrorCommandMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectUserMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/QueueMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ResourceMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ResourceUserMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/SessionMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TaskInstanceMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TenantMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UDFUserMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UdfFuncMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserAlertGroupMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerGroupMapper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/DolphinSchedulerManager.java delete 
mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/MysqlUpgradeDao.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/PostgresqlUpgradeDao.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/UpgradeDao.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/CreateDolphinScheduler.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/InitDolphinScheduler.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/utils/BeanContext.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/utils/DagHelper.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/utils/PropertyUtils.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/AbstractCycle.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CronUtils.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CycleFactory.java delete mode 100644 escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CycleLinks.java delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AccessTokenMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertGroupMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/CommandMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceUserMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ErrorCommandMapper.xml delete mode 100644 
escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessDefinitionMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectUserMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/QueueMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceUserMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ScheduleMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/SessionMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TaskInstanceMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TenantMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UDFUserMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UdfFuncMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserAlertGroupMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserMapper.xml delete mode 100644 escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/WorkerGroupMapper.xml delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/cron/CronUtilsTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AccessTokenMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AlertGroupMapperTest.java delete mode 100644 
escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AlertMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/CommandMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/DataSourceMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/DataSourceUserMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ErrorCommandMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessDefinitionMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessInstanceMapMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessInstanceMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProjectMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProjectUserMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/QueueMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ResourceMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ResourceUserMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ScheduleMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/SessionMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/TaskInstanceMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/TenantMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UDFUserMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UdfFuncMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UserAlertGroupMapperTest.java delete mode 100644 
escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UserMapperTest.java delete mode 100644 escheduler-dao/src/test/java/cn/escheduler/dao/mapper/WorkerGroupMapperTest.java delete mode 100644 escheduler-rpc/pom.xml delete mode 100644 escheduler-rpc/src/main/java/cn/escheduler/rpc/LogViewServiceGrpc.java delete mode 100644 escheduler-rpc/src/main/proto/scheduler.proto delete mode 100644 escheduler-server/pom.xml delete mode 100644 escheduler-server/src/main/assembly/package.xml delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/master/AbstractServer.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/master/MasterServer.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/master/log/MasterLogFilter.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterBaseTaskExecThread.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterExecThread.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterSchedulerThread.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterTaskExecThread.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/master/runner/SubProcessTaskExecThread.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/quartz/DruidConnectionProvider.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/quartz/ProcessScheduleJob.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/quartz/QuartzExecutors.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/rpc/LogClient.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/rpc/LoggerServer.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/utils/AlertManager.java delete mode 100644 
escheduler-server/src/main/java/cn/escheduler/server/utils/FlinkArgsUtils.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/utils/LoggerUtils.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/utils/ParamUtils.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/utils/ProcessUtils.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/utils/SparkArgsUtils.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/utils/UDFUtils.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/WorkerServer.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/log/TaskLogDiscriminator.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/log/TaskLogFilter.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/log/WorkerLogFilter.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/runner/FetchTaskThread.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/runner/TaskScheduleThread.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractCommandExecutor.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractYarnTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/PythonCommandExecutor.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/ShellCommandExecutor.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/TaskManager.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/TaskProps.java delete mode 100644 
escheduler-server/src/main/java/cn/escheduler/server/worker/task/dependent/DependentExecute.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/dependent/DependentTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/flink/FlinkTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/http/HttpTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/mr/MapReduceTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/processdure/ProcedureTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/python/PythonTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/shell/ShellTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/spark/SparkTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/zk/ZKMasterClient.java delete mode 100644 escheduler-server/src/main/java/cn/escheduler/server/zk/ZKWorkerClient.java delete mode 100644 escheduler-server/src/main/resources/master_logback.xml delete mode 100644 escheduler-server/src/main/resources/worker_logback.xml delete mode 100644 escheduler-server/src/test/java/cn/escheduler/server/master/AlertManagerTest.java delete mode 100644 escheduler-server/src/test/java/cn/escheduler/server/master/MasterCommandTest.java delete mode 100644 escheduler-server/src/test/java/cn/escheduler/server/master/ParamsTest.java delete mode 100644 escheduler-server/src/test/java/cn/escheduler/server/worker/EnvFileTest.java delete mode 100644 escheduler-server/src/test/java/cn/escheduler/server/worker/shell/ShellCommandExecutorTest.java delete mode 100644 
escheduler-server/src/test/java/cn/escheduler/server/worker/sql/SqlExecutorTest.java delete mode 100644 escheduler-server/src/test/java/cn/escheduler/server/worker/task/dependent/DependentTaskTest.java delete mode 100644 escheduler-server/src/test/java/cn/escheduler/server/zk/StandaloneZKServerForTest.java delete mode 100644 escheduler-server/src/test/java/cn/escheduler/server/zk/ZKWorkerClientTest.java delete mode 100755 escheduler-ui/install-escheduler-ui.sh rename script/{create-escheduler.sh => create-dolphinscheduler.sh} (100%) rename script/{escheduler-daemon.sh => dolphinscheduler-daemon.sh} (100%) rename script/{upgrade-escheduler.sh => upgrade-dolphinscheduler.sh} (100%) diff --git a/docs/en_US/EasyScheduler-FAQ.md b/docs/en_US/EasyScheduler-FAQ.md index b55b0e2413..65b3add815 100644 --- a/docs/en_US/EasyScheduler-FAQ.md +++ b/docs/en_US/EasyScheduler-FAQ.md @@ -50,7 +50,7 @@ A: We also support **the priority of processes and tasks**. Priority We have fiv ---- -## Q: Escheduler-grpc gives an error +## Q: dolphinscheduler-grpc gives an error A: Execute in the root directory: mvn -U clean package assembly:assembly -Dmaven.test.skip=true , then refresh the entire project @@ -70,11 +70,11 @@ A: Install **npm install node-sass --unsafe-perm** separately, then **npm instal ## Q: UI cannot log in normally. -A: 1, if it is node startup, check whether the .env API_BASE configuration under escheduler-ui is the Api Server service address. +A: 1, if it is node startup, check whether the .env API_BASE configuration under dolphinscheduler-ui is the Api Server service address. - 2, If it is nginx booted and installed via **install-escheduler-ui.sh**, check if the proxy_pass configuration in **/etc/nginx/conf.d/escheduler.conf** is the Api Server service. address + 2, If it is nginx booted and installed via **install-dolphinscheduler-ui.sh**, check if the proxy_pass configuration in **/etc/nginx/conf.d/dolphinscheduler.conf** is the Api Server service. 
address -  3, if the above configuration is correct, then please check if the Api Server service is normal, curl http://192.168.xx.xx:12345/escheduler/users/get-user-info, check the Api Server log, if Prompt cn.escheduler.api.interceptor.LoginHandlerInterceptor:[76] - session info is null, which proves that the Api Server service is normal. +  3, if the above configuration is correct, then please check if the Api Server service is normal, curl http://192.168.xx.xx:12345/dolphinscheduler/users/get-user-info, check the Api Server log, if Prompt cn.dolphinscheduler.api.interceptor.LoginHandlerInterceptor:[76] - session info is null, which proves that the Api Server service is normal. 4, if there is no problem above, you need to check if **server.context-path and server.port configuration** in **application.properties** is correct @@ -84,7 +84,7 @@ A: 1, if it is node startup, check whether the .env API_BASE configuration under A: 1, first **check whether the MasterServer service exists through jps**, or directly check whether there is a master service in zk from the service monitoring. -​ 2,If there is a master service, check **the command status statistics** or whether new records are added in **t_escheduler_error_command**. If it is added, **please check the message field.** +​ 2,If there is a master service, check **the command status statistics** or whether new records are added in **t_dolphinscheduler_error_command**. If it is added, **please check the message field.** --- @@ -102,9 +102,9 @@ A: 1, **first check whether the WorkerServer service exists through jps**, or A: Provide Docker image and Dockerfile. 
-Docker image address: https://hub.docker.com/r/escheduler/escheduler_images +Docker image address: https://hub.docker.com/r/dolphinscheduler/dolphinscheduler_images -Dockerfile address: https://github.com/qiaozhanwei/escheduler_dockerfile/tree/master/docker_escheduler +Dockerfile address: https://github.com/qiaozhanwei/dolphinscheduler_dockerfile/tree/master/docker_dolphinscheduler ------ @@ -112,9 +112,9 @@ Dockerfile address: https://github.com/qiaozhanwei/escheduler_dockerfile/tree/ma A: 1, if the replacement variable contains special characters, **use the \ transfer character to transfer** -​ 2, installPath="/data1_1T/escheduler", **this directory can not be the same as the install.sh directory currently installed with one click.** +​ 2, installPath="/data1_1T/dolphinscheduler", **this directory can not be the same as the install.sh directory currently installed with one click.** -​ 3, deployUser = "escheduler", **the deployment user must have sudo privileges**, because the worker is executed by sudo -u tenant sh xxx.command +​ 3, deployUser = "dolphinscheduler", **the deployment user must have sudo privileges**, because the worker is executed by sudo -u tenant sh xxx.command ​ 4, monitorServerState = "false", whether the service monitoring script is started, the default is not to start the service monitoring script. **If the service monitoring script is started, the master and worker services are monitored every 5 minutes, and if the machine is down, it will automatically restart.** @@ -126,7 +126,7 @@ A: 1, if the replacement variable contains special characters, **use the \ tra ## Q : Process definition and process instance offline exception -A : For **versions prior to 1.0.4**, modify the code under the escheduler-api cn.escheduler.api.quartz package. +A : For **versions prior to 1.0.4**, modify the code under the dolphinscheduler-api cn.dolphinscheduler.api.quartz package. 
``` public boolean deleteJob(String jobName, String jobGroupName) { @@ -207,7 +207,7 @@ A: 1, in **the process definition list**, click the **Start** button. ## Q : Python task setting Python version -A: 1,**for the version after 1.0.3** only need to modify PYTHON_HOME in conf/env/.escheduler_env.sh +A: 1,**for the version after 1.0.3** only need to modify PYTHON_HOME in conf/env/.dolphinscheduler_env.sh ``` export PYTHON_HOME=/bin/python diff --git a/docs/en_US/SUMMARY.md b/docs/en_US/SUMMARY.md index 397a4a110c..63be19a451 100644 --- a/docs/en_US/SUMMARY.md +++ b/docs/en_US/SUMMARY.md @@ -34,7 +34,7 @@ * Backend development documentation * [Environmental requirements](backend-development.md#Environmental requirements) * [Project compilation](backend-development.md#Project compilation) -* [Interface documentation](http://52.82.13.76:8888/escheduler/doc.html?language=en_US&lang=en) +* [Interface documentation](http://52.82.13.76:8888/dolphinscheduler/doc.html?language=en_US&lang=en) * FAQ * [FAQ](EasyScheduler-FAQ.md) * EasyScheduler upgrade documentation diff --git a/docs/en_US/backend-deployment.md b/docs/en_US/backend-deployment.md index 934a005f6b..68c8b4bd9a 100644 --- a/docs/en_US/backend-deployment.md +++ b/docs/en_US/backend-deployment.md @@ -7,7 +7,7 @@ There are two deployment modes for the backend: ## Preparations -Download the latest version of the installation package, download address: [gitee download](https://gitee.com/easyscheduler/EasyScheduler/attach_files/) or [github download](https://github.com/analysys/EasyScheduler/releases), download escheduler-backend-x.x.x.tar.gz(back-end referred to as escheduler-backend),escheduler-ui-x.x.x.tar.gz(front-end referred to as escheduler-ui) +Download the latest version of the installation package, download address: [gitee download](https://gitee.com/easyscheduler/EasyScheduler/attach_files/) or [github download](https://github.com/apache/incubator-dolphinscheduler/releases), download 
dolphinscheduler-backend-x.x.x.tar.gz(back-end referred to as dolphinscheduler-backend),dolphinscheduler-ui-x.x.x.tar.gz(front-end referred to as dolphinscheduler-ui) @@ -32,8 +32,8 @@ Download the latest version of the installation package, download address: [gi ``` vi /etc/sudoers -# For example, the deployment user is an escheduler account -escheduler ALL=(ALL) NOPASSWD: NOPASSWD: ALL +# For example, the deployment user is an dolphinscheduler account +dolphinscheduler ALL=(ALL) NOPASSWD: NOPASSWD: ALL # And you need to comment out the Default requiretty line #Default requiretty @@ -51,9 +51,9 @@ Configure SSH secret-free login on deployment machines and other installation ma Execute the following command to create database and account ``` - CREATE DATABASE escheduler DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci; - GRANT ALL PRIVILEGES ON escheduler.* TO '{user}'@'%' IDENTIFIED BY '{password}'; - GRANT ALL PRIVILEGES ON escheduler.* TO '{user}'@'localhost' IDENTIFIED BY '{password}'; + CREATE DATABASE dolphinscheduler DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci; + GRANT ALL PRIVILEGES ON dolphinscheduler.* TO '{user}'@'%' IDENTIFIED BY '{password}'; + GRANT ALL PRIVILEGES ON dolphinscheduler.* TO '{user}'@'localhost' IDENTIFIED BY '{password}'; flush privileges; ``` @@ -69,12 +69,12 @@ Configure SSH secret-free login on deployment machines and other installation ma Execute scripts for creating tables and importing basic data ``` - sh ./script/create-escheduler.sh + sh ./script/create-dolphinscheduler.sh ``` #### Preparations 5: Modify the deployment directory permissions and operation parameters - instruction of escheduler-backend directory + instruction of dolphinscheduler-backend directory ```directory bin : Basic service startup script @@ -85,11 +85,11 @@ sql : The project relies on SQL files install.sh : One-click deployment script ``` -- Modify permissions (please modify the 'deployUser' to the corresponding deployment user) so 
that the deployment user has operational privileges on the escheduler-backend directory +- Modify permissions (please modify the 'deployUser' to the corresponding deployment user) so that the deployment user has operational privileges on the dolphinscheduler-backend directory - `sudo chown -R deployUser:deployUser escheduler-backend` + `sudo chown -R deployUser:deployUser dolphinscheduler-backend` -- Modify the `.escheduler_env.sh` environment variable in the conf/env/directory +- Modify the `.dolphinscheduler_env.sh` environment variable in the conf/env/directory - Modify deployment parameters (depending on your server and business situation): @@ -132,11 +132,11 @@ After successful deployment, the log can be viewed and stored in a specified fol ```logPath logs/ - ├── escheduler-alert-server.log - ├── escheduler-master-server.log - |—— escheduler-worker-server.log - |—— escheduler-api-server.log - |—— escheduler-logger-server.log + ├── dolphinscheduler-alert-server.log + ├── dolphinscheduler-master-server.log + |—— dolphinscheduler-worker-server.log + |—— dolphinscheduler-api-server.log + |—— dolphinscheduler-logger-server.log ``` ### Compile source code to deploy @@ -151,7 +151,7 @@ After downloading the release version of the source package, unzip it into the r * View directory -After normal compilation, ./target/escheduler-{version}/ is generated in the current directory +After normal compilation, ./target/dolphinscheduler-{version}/ is generated in the current directory ### Start-and-stop services commonly used in systems (for service purposes, please refer to System Architecture Design for details) @@ -167,41 +167,41 @@ After normal compilation, ./target/escheduler-{version}/ is generated in the cur * start and stop one master server ```master -sh ./bin/escheduler-daemon.sh start master-server -sh ./bin/escheduler-daemon.sh stop master-server +sh ./bin/dolphinscheduler-daemon.sh start master-server +sh ./bin/dolphinscheduler-daemon.sh stop master-server ``` * 
start and stop one worker server ```worker -sh ./bin/escheduler-daemon.sh start worker-server -sh ./bin/escheduler-daemon.sh stop worker-server +sh ./bin/dolphinscheduler-daemon.sh start worker-server +sh ./bin/dolphinscheduler-daemon.sh stop worker-server ``` * start and stop api server ```Api -sh ./bin/escheduler-daemon.sh start api-server -sh ./bin/escheduler-daemon.sh stop api-server +sh ./bin/dolphinscheduler-daemon.sh start api-server +sh ./bin/dolphinscheduler-daemon.sh stop api-server ``` * start and stop logger server ```Logger -sh ./bin/escheduler-daemon.sh start logger-server -sh ./bin/escheduler-daemon.sh stop logger-server +sh ./bin/dolphinscheduler-daemon.sh start logger-server +sh ./bin/dolphinscheduler-daemon.sh stop logger-server ``` * start and stop alert server ```Alert -sh ./bin/escheduler-daemon.sh start alert-server -sh ./bin/escheduler-daemon.sh stop alert-server +sh ./bin/dolphinscheduler-daemon.sh start alert-server +sh ./bin/dolphinscheduler-daemon.sh stop alert-server ``` ## Database Upgrade Database upgrade is a function added in version 1.0.2. The database can be upgraded automatically by executing the following command: ```upgrade -sh ./script/upgrade-escheduler.sh +sh ./script/upgrade-dolphinscheduler.sh ``` diff --git a/docs/en_US/backend-development.md b/docs/en_US/backend-development.md index 10f7ba47f6..b287fad7bc 100644 --- a/docs/en_US/backend-development.md +++ b/docs/en_US/backend-development.md @@ -7,7 +7,7 @@ * [ZooKeeper](https://mirrors.tuna.tsinghua.edu.cn/apache/zookeeper)(3.4.6+) :Must be installed * [Maven](http://maven.apache.org/download.cgi)(3.3+) :Must be installed -Because the escheduler-rpc module in EasyScheduler uses Grpc, you need to use Maven to compile the generated classes. +Because the dolphinscheduler-rpc module in EasyScheduler uses Grpc, you need to use Maven to compile the generated classes. 
For those who are not familiar with maven, please refer to: [maven in five minutes](http://maven.apache.org/guides/getting-started/maven-in-five-minutes.html)(3.3+) http://maven.apache.org/install.html @@ -23,7 +23,7 @@ After importing the EasyScheduler source code into the development tools such as * View directory -After normal compilation, it will generate ./target/escheduler-{version}/ in the current directory. +After normal compilation, it will generate ./target/dolphinscheduler-{version}/ in the current directory. ``` bin diff --git a/docs/en_US/frontend-deployment.md b/docs/en_US/frontend-deployment.md index 919caf1485..d2a3cda0bb 100644 --- a/docs/en_US/frontend-deployment.md +++ b/docs/en_US/frontend-deployment.md @@ -10,7 +10,7 @@ The front-end has three deployment modes: automated deployment, manual deploymen Please download the latest version of the installation package, download address: [gitee](https://gitee.com/easyscheduler/EasyScheduler/attach_files/) -After downloading escheduler-ui-x.x.x.tar.gz,decompress`tar -zxvf escheduler-ui-x.x.x.tar.gz ./`and enter the`escheduler-ui`directory +After downloading dolphinscheduler-ui-x.x.x.tar.gz,decompress`tar -zxvf dolphinscheduler-ui-x.x.x.tar.gz ./`and enter the`dolphinscheduler-ui`directory @@ -21,7 +21,7 @@ Automated deployment is recommended for either of the following two ways ### Automated Deployment -Edit the installation file`vi install-escheduler-ui.sh` in the` escheduler-ui` directory +Edit the installation file`vi install-dolphinscheduler-ui.sh` in the` dolphinscheduler-ui` directory Change the front-end access port and the back-end proxy interface address @@ -35,7 +35,7 @@ esc_proxy_port="http://192.168.xx.xx:12345" >Front-end automatic deployment based on Linux system `yum` operation, before deployment, please install and update`yum` -under this directory, execute`./install-escheduler-ui.sh` +under this directory, execute`./install-dolphinscheduler-ui.sh` ### Manual Deployment @@ -63,7 +63,7 @@ 
server { root /xx/dist; # the dist directory address decompressed by the front end above (self-modifying) index index.html index.html; } - location /escheduler { + location /dolphinscheduler { proxy_pass http://192.168.xx.xx:12345; # interface address (self-modifying) proxy_set_header Host $host; proxy_set_header X-Real-IP $remote_addr; diff --git a/docs/en_US/frontend-development.md b/docs/en_US/frontend-development.md index 286c598dbc..bb3e464fb7 100644 --- a/docs/en_US/frontend-development.md +++ b/docs/en_US/frontend-development.md @@ -23,7 +23,7 @@ Node package download (note version 8.9.4) `https://nodejs.org/download/release/ - #### Front-end project construction -Use the command line mode `cd` enter the `escheduler-ui` project directory and execute `npm install` to pull the project dependency package. +Use the command line mode `cd` enter the `dolphinscheduler-ui` project directory and execute `npm install` to pull the project dependency package. > If `npm install` is very slow @@ -34,7 +34,7 @@ Use the command line mode `cd` enter the `escheduler-ui` project directory and - Create a new `.env` file or the interface that interacts with the backend -Create a new` .env` file in the `escheduler-ui `directory, add the ip address and port of the backend service to the file, and use it to interact with the backend. The contents of the` .env` file are as follows: +Create a new` .env` file in the `dolphinscheduler-ui `directory, add the ip address and port of the backend service to the file, and use it to interact with the backend. 
The contents of the` .env` file are as follows: ``` # Proxy interface address (modified by yourself) API_BASE = http://192.168.xx.xx:12345 @@ -67,7 +67,7 @@ Visit address` http://localhost:8888/#/` Install pm2 `npm install -g pm2` -Execute `pm2 start npm -- run dev` to start the project in the project `escheduler-ui `root directory +Execute `pm2 start npm -- run dev` to start the project in the project `dolphinscheduler-ui `root directory #### command @@ -81,7 +81,7 @@ Execute `pm2 start npm -- run dev` to start the project in the project `eschedul ``` -[root@localhost escheduler-ui]# pm2 start npm -- run dev +[root@localhost dolphinscheduler-ui]# pm2 start npm -- run dev [PM2] Applying action restartProcessId on app [npm](ids: 0) [PM2] [npm](0) ✓ [PM2] Process successfully started diff --git a/docs/en_US/quick-start.md b/docs/en_US/quick-start.md index a1dc255345..b4dce0ea5f 100644 --- a/docs/en_US/quick-start.md +++ b/docs/en_US/quick-start.md @@ -2,7 +2,7 @@ * Administrator user login - > Address:192.168.xx.xx:8888 Username and password:admin/escheduler123 + > Address:192.168.xx.xx:8888 Username and password:admin/dolphinscheduler123

diff --git a/docs/en_US/system-manual.md b/docs/en_US/system-manual.md index d571e1d66f..751420c2d4 100644 --- a/docs/en_US/system-manual.md +++ b/docs/en_US/system-manual.md @@ -331,7 +331,7 @@ conf/common/hadoop.properties ## Security - The security has the functions of queue management, tenant management, user management, warning group management, worker group manager, token manage and other functions. It can also authorize resources, data sources, projects, etc. -- Administrator login, default username password: admin/escheduler 123 +- Administrator login, default username password: admin/dolphinscheduler123 @@ -391,7 +391,7 @@ conf/common/hadoop.properties CloseableHttpClient httpclient = HttpClients.createDefault(); // create http post request - HttpPost httpPost = new HttpPost("http://127.0.0.1:12345/escheduler/projects/create"); + HttpPost httpPost = new HttpPost("http://127.0.0.1:12345/dolphinscheduler/projects/create"); httpPost.setHeader("token", "123"); // set parameters List parameters = new ArrayList(); diff --git a/docs/en_US/upgrade.md b/docs/en_US/upgrade.md index b5c743fd84..28af7d0703 100644 --- a/docs/en_US/upgrade.md +++ b/docs/en_US/upgrade.md @@ -3,13 +3,13 @@ ## 1. Back up the previous version of the files and database -## 2. Stop all services of escheduler +## 2. Stop all services of dolphinscheduler `sh ./script/stop-all.sh` ## 3. Download the new version of the installation package -- [gitee](https://gitee.com/easyscheduler/EasyScheduler/attach_files), download the latest version of the front and back installation packages (backend referred to as escheduler-backend, front end referred to as escheduler-ui) +- [gitee](https://gitee.com/easyscheduler/EasyScheduler/attach_files), download the latest version of the front and back installation packages (backend referred to as dolphinscheduler-backend, front end referred to as dolphinscheduler-ui) - The following upgrade operations need to be performed in the new version of the directory ## 4. 
Database upgrade @@ -23,7 +23,7 @@ - Execute database upgrade script -`sh ./script/upgrade-escheduler.sh` +`sh ./script/upgrade-dolphinscheduler.sh` ## 5. Backend service upgrade diff --git a/docs/zh_CN/EasyScheduler-FAQ.md b/docs/zh_CN/EasyScheduler-FAQ.md index 360565a4ee..a4419a4c00 100644 --- a/docs/zh_CN/EasyScheduler-FAQ.md +++ b/docs/zh_CN/EasyScheduler-FAQ.md @@ -50,7 +50,7 @@ A:我们同时 **支持流程和任务的优先级**。优先级我们有 **HI ---- -## Q:escheduler-grpc报错 +## Q:dolphinscheduler-grpc报错 A:在根目录下执行:mvn -U clean package assembly:assembly -Dmaven.test.skip=true , 然后刷新下整个项目 @@ -70,11 +70,11 @@ A:单独安装 **npm install node-sass --unsafe-perm**,之后再 **npm insta ## Q:UI 不能正常登陆访问 -A: 1,如果是node启动的查看escheduler-ui下的.env API_BASE配置是否是Api Server服务地址 +A: 1,如果是node启动的查看dolphinscheduler-ui下的.env API_BASE配置是否是Api Server服务地址 - 2,如果是nginx启动的并且是通过 **install-escheduler-ui.sh** 安装的,查看 **/etc/nginx/conf.d/escheduler.conf** 中的proxy_pass配置是否是Api Server服务地址 + 2,如果是nginx启动的并且是通过 **install-dolphinscheduler-ui.sh** 安装的,查看 **/etc/nginx/conf.d/dolphinscheduler.conf** 中的proxy_pass配置是否是Api Server服务地址 - 3,如果以上配置都是正确的,那么请查看Api Server服务是否是正常的,curl http://192.168.xx.xx:12345/escheduler/users/get-user-info,查看Api Server日志,如果提示 cn.escheduler.api.interceptor.LoginHandlerInterceptor:[76] - session info is null,则证明Api Server服务是正常的 + 3,如果以上配置都是正确的,那么请查看Api Server服务是否是正常的,curl http://192.168.xx.xx:12345/dolphinscheduler/users/get-user-info,查看Api Server日志,如果提示 cn.dolphinscheduler.api.interceptor.LoginHandlerInterceptor:[76] - session info is null,则证明Api Server服务是正常的 4,如果以上都没有问题,需要查看一下 **application.properties** 中的 **server.context-path 和 server.port 配置**是否正确 @@ -84,7 +84,7 @@ A: 1,如果是node启动的查看escheduler-ui下的.env API_BASE配置是 A: 1,首先通过**jps 查看MasterServer服务是否存在**,或者从服务监控直接查看zk中是否存在master服务 -​ 2,如果存在master服务,查看 **命令状态统计** 或者 **t_escheduler_error_command** 中是否增加的新记录,如果增加了,**请查看 message 字段定位启动异常原因** +​ 2,如果存在master服务,查看 **命令状态统计** 或者 **t_dolphinscheduler_error_command** 中是否增加的新记录,如果增加了,**请查看 message 字段定位启动异常原因** --- @@ -102,9 
+102,9 @@ A: 1,首先通过**jps 查看WorkerServer服务是否存在**,或者从服 A: 提供Docker镜像及Dockerfile。 -Docker镜像地址:https://hub.docker.com/r/escheduler/escheduler_images +Docker镜像地址:https://hub.docker.com/r/dolphinscheduler/dolphinscheduler_images -Dockerfile地址:https://github.com/qiaozhanwei/escheduler_dockerfile/tree/master/docker_escheduler +Dockerfile地址:https://github.com/qiaozhanwei/dolphinscheduler_dockerfile/tree/master/docker_dolphinscheduler --- @@ -112,9 +112,9 @@ Dockerfile地址:https://github.com/qiaozhanwei/escheduler_dockerfile/tree/mas A: 1,如果替换变量中包含特殊字符,**请用 \ 转移符进行转移** -​ 2,installPath="/data1_1T/escheduler",**这个目录不能和当前要一键安装的install.sh目录是一样的** +​ 2,installPath="/data1_1T/dolphinscheduler",**这个目录不能和当前要一键安装的install.sh目录是一样的** -​ 3,deployUser="escheduler",**部署用户必须具有sudo权限**,因为worker是通过sudo -u 租户 sh xxx.command进行执行的 +​ 3,deployUser="dolphinscheduler",**部署用户必须具有sudo权限**,因为worker是通过sudo -u 租户 sh xxx.command进行执行的 ​ 4,monitorServerState="false",服务监控脚本是否启动,默认是不启动服务监控脚本的。**如果启动服务监控脚本,则每5分钟定时来监控master和worker的服务是否down机,如果down机则会自动重启** @@ -126,7 +126,7 @@ A: 1,如果替换变量中包含特殊字符,**请用 \ 转移符进行 ## Q : 流程定义和流程实例下线异常 -A : 对于 **1.0.4 以前的版本中**,修改escheduler-api cn.escheduler.api.quartz包下的代码即可 +A : 对于 **1.0.4 以前的版本中**,修改dolphinscheduler-api cn.dolphinscheduler.api.quartz包下的代码即可 ``` public boolean deleteJob(String jobName, String jobGroupName) { @@ -205,7 +205,7 @@ A: 1,在 **流程定义列表**,点击 **启动** 按钮 ## Q : Python任务设置Python版本 -A: 1,对于1**.0.3之后的版本**只需要修改 conf/env/.escheduler_env.sh中的PYTHON_HOME +A: 1,对于1**.0.3之后的版本**只需要修改 conf/env/.dolphinscheduler_env.sh中的PYTHON_HOME ``` export PYTHON_HOME=/bin/python diff --git a/docs/zh_CN/SUMMARY.md b/docs/zh_CN/SUMMARY.md index 2b153b60c5..06d17b9215 100644 --- a/docs/zh_CN/SUMMARY.md +++ b/docs/zh_CN/SUMMARY.md @@ -29,7 +29,7 @@ * [开发环境搭建](后端开发文档.md#项目编译) * [自定义任务插件文档](任务插件开发.md#任务插件开发) -* [接口文档](http://52.82.13.76:8888/escheduler/doc.html?language=zh_CN&lang=cn) +* [接口文档](http://52.82.13.76:8888/dolphinscheduler/doc.html?language=zh_CN&lang=cn) * FAQ * 
[FAQ](EasyScheduler-FAQ.md) * 系统版本升级文档 diff --git a/docs/zh_CN/任务插件开发.md b/docs/zh_CN/任务插件开发.md index 5e733b9540..0157735974 100644 --- a/docs/zh_CN/任务插件开发.md +++ b/docs/zh_CN/任务插件开发.md @@ -6,8 +6,8 @@ #### 基于YARN的计算(参见MapReduceTask) -- 需要在 **cn.escheduler.server.worker.task** 下的 **TaskManager** 类中创建自定义任务(也需在TaskType注册对应的任务类型) -- 需要继承**cn.escheduler.server.worker.task** 下的 **AbstractYarnTask** +- 需要在 **org.apache.dolphinscheduler.server.worker.task** 下的 **TaskManager** 类中创建自定义任务(也需在TaskType注册对应的任务类型) +- 需要继承**org.apache.dolphinscheduler.server.worker.task** 下的 **AbstractYarnTask** - 构造方法调度 **AbstractYarnTask** 构造方法 - 继承 **AbstractParameters** 自定义任务参数实体 - 重写 **AbstractTask** 的 **init** 方法中解析**自定义任务参数** @@ -16,9 +16,9 @@ #### 基于非YARN的计算(参见ShellTask) -- 需要在 **cn.escheduler.server.worker.task** 下的 **TaskManager** 中创建自定义任务 +- 需要在 **org.apache.dolphinscheduler.server.worker.task** 下的 **TaskManager** 中创建自定义任务 -- 需要继承**cn.escheduler.server.worker.task** 下的 **AbstractTask** +- 需要继承**org.apache.dolphinscheduler.server.worker.task** 下的 **AbstractTask** - 构造方法中实例化 **ShellCommandExecutor** @@ -46,8 +46,8 @@ ### 基于非SHELL的任务(参见SqlTask) -- 需要在 **cn.escheduler.server.worker.task** 下的 **TaskManager** 中创建自定义任务 -- 需要继承**cn.escheduler.server.worker.task** 下的 **AbstractTask** +- 需要在 **org.apache.dolphinscheduler.server.worker.task** 下的 **TaskManager** 中创建自定义任务 +- 需要继承**org.apache.dolphinscheduler.server.worker.task** 下的 **AbstractTask** - 继承 **AbstractParameters** 自定义任务参数实体 - 构造方法或者重写 **AbstractTask** 的 **init** 方法中,解析自定义任务参数实体 - 重写 **handle** 方法实现业务逻辑并设置相应的**exitStatusCode** diff --git a/docs/zh_CN/前端开发文档.md b/docs/zh_CN/前端开发文档.md index f805f5ed8c..4d5cea11f3 100644 --- a/docs/zh_CN/前端开发文档.md +++ b/docs/zh_CN/前端开发文档.md @@ -23,7 +23,7 @@ Node包下载 (注意版本 8.9.4) `https://nodejs.org/download/release/v8.9.4/` - #### 前端项目构建 -用命令行模式 `cd` 进入 `escheduler-ui`项目目录并执行 `npm install` 拉取项目依赖包 +用命令行模式 `cd` 进入 `dolphinscheduler-ui`项目目录并执行 `npm install` 拉取项目依赖包 > 如果 `npm install` 速度非常慢 @@ -34,7 +34,7 @@ 
Node包下载 (注意版本 8.9.4) `https://nodejs.org/download/release/v8.9.4/` - 新建一个`.env`文件,用于跟后端交互的接口 -在`escheduler-ui`目录下新建一个`.env`文件,在文件里添加后端服务的ip地址和端口,用于跟后端交互,`.env`文件内容如下: +在`dolphinscheduler-ui`目录下新建一个`.env`文件,在文件里添加后端服务的ip地址和端口,用于跟后端交互,`.env`文件内容如下: ``` # 代理的接口地址(自行修改) API_BASE = http://192.168.xx.xx:12345 @@ -68,7 +68,7 @@ npm install node-sass --unsafe-perm //单独安装node-sass依赖 安装pm2 `npm install -g pm2` -在项目`escheduler-ui`根目录执行 `pm2 start npm -- run dev` 启动项目 +在项目`dolphinscheduler-ui`根目录执行 `pm2 start npm -- run dev` 启动项目 #### 命令 @@ -82,7 +82,7 @@ npm install node-sass --unsafe-perm //单独安装node-sass依赖 ``` -[root@localhost escheduler-ui]# pm2 start npm -- run dev +[root@localhost dolphinscheduler-ui]# pm2 start npm -- run dev [PM2] Applying action restartProcessId on app [npm](ids: 0) [PM2] [npm](0) ✓ [PM2] Process successfully started diff --git a/docs/zh_CN/前端部署文档.md b/docs/zh_CN/前端部署文档.md index dc9cf61216..8bb7324531 100644 --- a/docs/zh_CN/前端部署文档.md +++ b/docs/zh_CN/前端部署文档.md @@ -7,7 +7,7 @@ 请下载最新版本的安装包,下载地址: [码云下载](https://gitee.com/easyscheduler/EasyScheduler/attach_files/) 或者 [github下载](https://github.com/analysys/EasyScheduler/releases) -下载 escheduler-ui-x.x.x.tar.gz 后,解压`tar -zxvf escheduler-ui-x.x.x.tar.gz ./`后,进入`escheduler-ui`目录 +下载 dolphinscheduler-ui-x.x.x.tar.gz 后,解压`tar -zxvf dolphinscheduler-ui-x.x.x.tar.gz ./`后,进入`dolphinscheduler-ui`目录 @@ -16,7 +16,7 @@ 以下两种方式任选其一部署即可,推荐自动化部署 ### 2.1 自动化部署 -在`escheduler-ui`目录下编辑安装文件`vi install-escheduler-ui.sh` +在`dolphinscheduler-ui`目录下编辑安装文件`vi install-dolphinscheduler-ui.sh` 更改前端访问端口和后端代理接口地址 @@ -30,7 +30,7 @@ esc_proxy_port="http://192.168.xx.xx:12345" >前端自动部署基于linux系统`yum`操作,部署之前请先安装更新`yum` -在该目录下执行`./install-escheduler-ui.sh` +在该目录下执行`./install-dolphinscheduler-ui.sh` ### 2.2 手动部署 @@ -55,7 +55,7 @@ server { root /xx/dist; # 上面前端解压的dist目录地址(自行修改) index index.html index.html; } - location /escheduler { + location /dolphinscheduler { proxy_pass http://192.168.xx.xx:12345; # 接口地址(自行修改) proxy_set_header Host $host; 
proxy_set_header X-Real-IP $remote_addr; diff --git a/docs/zh_CN/升级文档.md b/docs/zh_CN/升级文档.md index 83166971fc..b2d9c29cfe 100644 --- a/docs/zh_CN/升级文档.md +++ b/docs/zh_CN/升级文档.md @@ -3,13 +3,13 @@ ## 1. 备份上一版本文件和数据库 -## 2. 停止escheduler所有服务 +## 2. 停止dolphinscheduler所有服务 `sh ./script/stop-all.sh` ## 3. 下载新版本的安装包 -- [码云下载](https://gitee.com/easyscheduler/EasyScheduler/attach_files), 下载最新版本的前后端安装包(后端简称escheduler-backend、前端简称escheduler-ui) +- [码云下载](https://gitee.com/easyscheduler/EasyScheduler/attach_files), 下载最新版本的前后端安装包(后端简称dolphinscheduler-backend、前端简称dolphinscheduler-ui) - 以下升级操作都需要在新版本的目录进行 ## 4. 数据库升级 @@ -23,7 +23,7 @@ - 执行数据库升级脚本 -`sh ./script/upgrade-escheduler.sh` +`sh ./script/upgrade-dolphinscheduler.sh` ## 5. 后端服务升级 diff --git a/docs/zh_CN/后端开发文档.md b/docs/zh_CN/后端开发文档.md index 7d2d34a0a0..6159f85c0a 100644 --- a/docs/zh_CN/后端开发文档.md +++ b/docs/zh_CN/后端开发文档.md @@ -7,7 +7,7 @@ * [ZooKeeper](https://mirrors.tuna.tsinghua.edu.cn/apache/zookeeper)(3.4.6+) :必装 * [Maven](http://maven.apache.org/download.cgi)(3.3+) :必装 -因EasyScheduler中escheduler-rpc模块使用到Grpc,需要用到Maven编译生成所需要的类 +因EasyScheduler中dolphinscheduler-rpc模块使用到Grpc,需要用到Maven编译生成所需要的类 对maven不熟的伙伴请参考: [maven in five minutes](http://maven.apache.org/guides/getting-started/maven-in-five-minutes.html)(3.3+) http://maven.apache.org/install.html @@ -23,7 +23,7 @@ http://maven.apache.org/install.html * 查看目录 -正常编译完后,会在当前目录生成 ./target/escheduler-{version}/ +正常编译完后,会在当前目录生成 ./target/dolphinscheduler-{version}/ ``` bin diff --git a/docs/zh_CN/后端部署文档.md b/docs/zh_CN/后端部署文档.md index bf217880a5..525b0e7f8a 100644 --- a/docs/zh_CN/后端部署文档.md +++ b/docs/zh_CN/后端部署文档.md @@ -4,7 +4,7 @@ ## 1、准备工作 -请下载最新版本的安装包,下载地址: [码云下载](https://gitee.com/easyscheduler/EasyScheduler/attach_files/)或者[github下载](https://github.com/analysys/EasyScheduler/releases) ,下载escheduler-backend-x.x.x.tar.gz(后端简称escheduler-backend),escheduler-ui-x.x.x.tar.gz(前端简称escheduler-ui) +请下载最新版本的安装包,下载地址: 
[码云下载](https://gitee.com/easyscheduler/EasyScheduler/attach_files/)或者[github下载](https://github.com/apache/incubator-dolphinscheduler/releases) ,下载dolphinscheduler-backend-x.x.x.tar.gz(后端简称dolphinscheduler-backend),dolphinscheduler-ui-x.x.x.tar.gz(前端简称dolphinscheduler-ui) #### 准备一: 基础软件安装(必装项请自行安装) @@ -27,8 +27,8 @@ ```部署账号 vi /etc/sudoers -# 例如部署用户是escheduler账号 -escheduler ALL=(ALL) NOPASSWD: NOPASSWD: ALL +# 例如部署用户是dolphinscheduler账号 +dolphinscheduler ALL=(ALL) NOPASSWD: NOPASSWD: ALL # 并且需要注释掉 Default requiretty 一行 #Default requiretty @@ -47,9 +47,9 @@ escheduler ALL=(ALL) NOPASSWD: NOPASSWD: ALL 执行以下命令创建database和账号 ```sql - CREATE DATABASE escheduler DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci; - GRANT ALL PRIVILEGES ON escheduler.* TO '{user}'@'%' IDENTIFIED BY '{password}'; - GRANT ALL PRIVILEGES ON escheduler.* TO '{user}'@'localhost' IDENTIFIED BY '{password}'; + CREATE DATABASE dolphinscheduler DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci; + GRANT ALL PRIVILEGES ON dolphinscheduler.* TO '{user}'@'%' IDENTIFIED BY '{password}'; + GRANT ALL PRIVILEGES ON dolphinscheduler.* TO '{user}'@'localhost' IDENTIFIED BY '{password}'; flush privileges; ``` @@ -63,12 +63,12 @@ escheduler ALL=(ALL) NOPASSWD: NOPASSWD: ALL ``` 执行创建表和导入基础数据脚本 ``` - sh ./script/create-escheduler.sh + sh ./script/create-dolphinscheduler.sh ``` #### 准备五: 修改部署目录权限及运行参数 - escheduler-backend目录介绍 + dolphinscheduler-backend目录介绍 ``` bin : 基础服务启动脚本 @@ -79,11 +79,11 @@ sql : 项目依赖sql文件 install.sh : 一键部署脚本 ``` -- 修改权限(请将'deployUser'字段修改为对应部署用户),使得部署用户对escheduler-backend目录有操作权限 +- 修改权限(请将'deployUser'字段修改为对应部署用户),使得部署用户对dolphinscheduler-backend目录有操作权限 - `sudo chown -R deployUser:deployUser escheduler-backend` + `sudo chown -R deployUser:deployUser dolphinscheduler-backend` -- 修改conf/env/目录下的 `.escheduler_env.sh` 环境变量 +- 修改conf/env/目录下的 `.dolphinscheduler_env.sh` 环境变量 - 修改部署参数(根据自己服务器及业务情况): @@ -127,11 +127,11 @@ install.sh : 一键部署脚本 ```日志路径 logs/ - ├── 
escheduler-alert-server.log - ├── escheduler-master-server.log - |—— escheduler-worker-server.log - |—— escheduler-api-server.log - |—— escheduler-logger-server.log + ├── dolphinscheduler-alert-server.log + ├── dolphinscheduler-master-server.log + |—— dolphinscheduler-worker-server.log + |—— dolphinscheduler-api-server.log + |—— dolphinscheduler-logger-server.log ``` ### 2.2 编译源码来部署 @@ -146,7 +146,7 @@ install.sh : 一键部署脚本 * 查看目录 -正常编译完后,会在当前目录生成 `./target/escheduler-{version}/` +正常编译完后,会在当前目录生成 `./target/dolphinscheduler-{version}/` ```查看目录 ../ @@ -173,38 +173,38 @@ install.sh : 一键部署脚本 * 启停Master ```启动master -sh ./bin/escheduler-daemon.sh start master-server -sh ./bin/escheduler-daemon.sh stop master-server +sh ./bin/dolphinscheduler-daemon.sh start master-server +sh ./bin/dolphinscheduler-daemon.sh stop master-server ``` * 启停Worker ``` -sh ./bin/escheduler-daemon.sh start worker-server -sh ./bin/escheduler-daemon.sh stop worker-server +sh ./bin/dolphinscheduler-daemon.sh start worker-server +sh ./bin/dolphinscheduler-daemon.sh stop worker-server ``` * 启停Api ``` -sh ./bin/escheduler-daemon.sh start api-server -sh ./bin/escheduler-daemon.sh stop api-server +sh ./bin/dolphinscheduler-daemon.sh start api-server +sh ./bin/dolphinscheduler-daemon.sh stop api-server ``` * 启停Logger ``` -sh ./bin/escheduler-daemon.sh start logger-server -sh ./bin/escheduler-daemon.sh stop logger-server +sh ./bin/dolphinscheduler-daemon.sh start logger-server +sh ./bin/dolphinscheduler-daemon.sh stop logger-server ``` * 启停Alert ``` -sh ./bin/escheduler-daemon.sh start alert-server -sh ./bin/escheduler-daemon.sh stop alert-server +sh ./bin/dolphinscheduler-daemon.sh start alert-server +sh ./bin/dolphinscheduler-daemon.sh stop alert-server ``` ## 3、数据库升级 数据库升级是在1.0.2版本增加的功能,执行以下命令即可自动升级数据库 ``` -sh ./script/upgrade-escheduler.sh +sh ./script/upgrade-dolphinscheduler.sh ``` diff --git a/docs/zh_CN/快速上手.md b/docs/zh_CN/快速上手.md index 966ef88e84..7aa4f6577c 100644 --- a/docs/zh_CN/快速上手.md +++ 
b/docs/zh_CN/快速上手.md @@ -1,7 +1,7 @@ # 快速上手 * 管理员用户登录 - >地址:192.168.xx.xx:8888 用户名密码:admin/escheduler123 + >地址:192.168.xx.xx:8888 用户名密码:admin/dolphinscheduler123

diff --git a/docs/zh_CN/系统使用手册.md b/docs/zh_CN/系统使用手册.md index 348cc2b36a..8ec5d0a5ea 100644 --- a/docs/zh_CN/系统使用手册.md +++ b/docs/zh_CN/系统使用手册.md @@ -323,7 +323,7 @@ conf/common/hadoop.properties ## 安全中心(权限系统) - 安全中心是只有管理员账户才有权限的功能,有队列管理、租户管理、用户管理、告警组管理、worker分组、令牌管理等功能,还可以对资源、数据源、项目等授权 - - 管理员登录,默认用户名密码:admin/escheduler123 + - 管理员登录,默认用户名密码:admin/dolphinscheduler123 ### 创建队列 - 队列是在执行spark、mapreduce等程序,需要用到“队列”参数时使用的。 @@ -379,7 +379,7 @@ conf/common/hadoop.properties CloseableHttpClient httpclient = HttpClients.createDefault(); // create http post request - HttpPost httpPost = new HttpPost("http://127.0.0.1:12345/escheduler/projects/create"); + HttpPost httpPost = new HttpPost("http://127.0.0.1:12345/dolphinscheduler/projects/create"); httpPost.setHeader("token", "123"); // set parameters List parameters = new ArrayList(); diff --git a/dolphinscheduler-alert/pom.xml b/dolphinscheduler-alert/pom.xml new file mode 100644 index 0000000000..11dc7bd960 --- /dev/null +++ b/dolphinscheduler-alert/pom.xml @@ -0,0 +1,129 @@ + + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler + 1.1.0-SNAPSHOT + + dolphinscheduler-alert + jar + + + UTF-8 + + + + junit + junit + test + + + + org.apache.commons + commons-email + + + + org.freemarker + freemarker + + + + com.alibaba + fastjson + + + + com.fasterxml.jackson.core + jackson-core + + + + com.fasterxml.jackson.core + jackson-databind + + + + org.slf4j + slf4j-api + + + + org.apache.commons + commons-collections4 + + + + commons-logging + commons-logging + + + + org.apache.commons + commons-lang3 + + + + com.google.guava + guava + + + + ch.qos.logback + logback-classic + + + + commons-io + commons-io + + + + + + org.apache.poi + poi + + + + org.apache.dolphinscheduler + dolphinscheduler-dao + + + + + + + + maven-assembly-plugin + 2.6 + + + src/main/assembly/package.xml + + false + + + + make-assembly + package + + single + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${java.version} + ${java.version} + 
${project.build.sourceEncoding} + + + + + + diff --git a/dolphinscheduler-alert/src/main/assembly/package.xml b/dolphinscheduler-alert/src/main/assembly/package.xml new file mode 100644 index 0000000000..b10f76c62b --- /dev/null +++ b/dolphinscheduler-alert/src/main/assembly/package.xml @@ -0,0 +1,40 @@ + + cluster + + dir + + false + + + src/main/resources + + **/*.properties + **/*.xml + **/*.json + **/*.ftl + + conf + + + target/ + + dolphinscheduler-alert-${project.version}.jar + + lib + + + + + lib + true + + javax.servlet:servlet-api + org.eclipse.jetty.aggregate:jetty-all + org.slf4j:slf4j-log4j12 + + + + \ No newline at end of file diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java new file mode 100644 index 0000000000..3eba9fdbc8 --- /dev/null +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.alert; + +import org.apache.dolphinscheduler.alert.runner.AlertSender; +import org.apache.dolphinscheduler.alert.utils.Constants; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.entity.Alert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.CommandLineRunner; +import org.springframework.boot.SpringApplication; +import org.springframework.context.annotation.ComponentScan; + +import java.util.List; + +/** + * alert of start + */ +@ComponentScan("org.apache.dolphinscheduler") +public class AlertServer implements CommandLineRunner { + private static final Logger logger = LoggerFactory.getLogger(AlertServer.class); + /** + * Alert Dao + */ + @Autowired + private AlertDao alertDao; + + private AlertSender alertSender; + + private static volatile AlertServer instance; + + public AlertServer() { + + } + + public static AlertServer getInstance(){ + if (null == instance) { + synchronized (AlertServer.class) { + if(null == instance) { + instance = new AlertServer(); + } + } + } + return instance; + } + + public void start(){ + logger.info("Alert Server ready start!"); + while (Stopper.isRunning()){ + try { + Thread.sleep(Constants.ALERT_SCAN_INTERVEL); + } catch (InterruptedException e) { + logger.error(e.getMessage(),e); + } + List alerts = alertDao.listWaitExecutionAlert(); + alertSender = new AlertSender(alerts, alertDao); + alertSender.run(); + } + } + + + public static void main(String[] args){ + SpringApplication app = new SpringApplication(AlertServer.class); + app.run(args); + } + + @Override + public void run(String...
strings) throws Exception { + AlertServer alertServer = AlertServer.getInstance(); + alertServer.start(); + } +} diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EmailManager.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EmailManager.java new file mode 100644 index 0000000000..957f195e1c --- /dev/null +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EmailManager.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.alert.manager; + +import org.apache.dolphinscheduler.alert.utils.MailUtils; +import org.apache.dolphinscheduler.common.enums.ShowType; + +import java.util.List; +import java.util.Map; + +/** + * email send manager + */ +public class EmailManager { + /** + * email send + * @param receviersList + * @param receviersCcList + * @param title + * @param content + * @param showType + * @return + */ + public Map send(List receviersList,List receviersCcList,String title,String content,ShowType showType){ + + return MailUtils.sendMails(receviersList, receviersCcList, title, content, showType); + } + + /** + * msg send + * @param receviersList + * @param title + * @param content + * @param showType + * @return + */ + public Map send(List receviersList,String title,String content,ShowType showType){ + + return MailUtils.sendMails(receviersList,title, content, showType); + } +} diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EnterpriseWeChatManager.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EnterpriseWeChatManager.java new file mode 100644 index 0000000000..67f4fee7e0 --- /dev/null +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EnterpriseWeChatManager.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.alert.manager; + +import org.apache.dolphinscheduler.alert.utils.Constants; +import org.apache.dolphinscheduler.alert.utils.EnterpriseWeChatUtils; +import org.apache.dolphinscheduler.dao.entity.Alert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Enterprise WeChat Manager + */ +public class EnterpriseWeChatManager { + private static final Logger logger = LoggerFactory.getLogger(MsgManager.class); + /** + * Enterprise We Chat send + * @param alert + */ + public Map send(Alert alert, String token){ + Map retMap = new HashMap<>(); + retMap.put(Constants.STATUS, false); + String agentId = EnterpriseWeChatUtils.enterpriseWeChatAgentId; + String users = EnterpriseWeChatUtils.enterpriseWeChatUsers; + List userList = Arrays.asList(users.split(",")); + logger.info("send message {}",alert); + String msg = EnterpriseWeChatUtils.makeUserSendMsg(userList, agentId,EnterpriseWeChatUtils.markdownByAlert(alert)); + try { + EnterpriseWeChatUtils.sendEnterpriseWeChat(Constants.UTF_8, msg, token); + } catch (IOException e) { + logger.error(e.getMessage(),e); + } + retMap.put(Constants.STATUS, true); + return retMap; + } + +} diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/MsgManager.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/MsgManager.java new file mode 100644 index 0000000000..35cc9bea3f --- 
/dev/null +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/MsgManager.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.alert.manager; + +import org.apache.dolphinscheduler.dao.entity.Alert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * SMS send manager + */ +public class MsgManager { + + private static final Logger logger = LoggerFactory.getLogger(MsgManager.class); + /** + * SMS send + * @param alert + */ + public void send(Alert alert){ + logger.info("send message {}",alert); + } +} diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/runner/AlertSender.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/runner/AlertSender.java new file mode 100644 index 0000000000..f7d73e0d77 --- /dev/null +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/runner/AlertSender.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.alert.runner; + +import org.apache.dolphinscheduler.alert.manager.EmailManager; +import org.apache.dolphinscheduler.alert.manager.EnterpriseWeChatManager; +import org.apache.dolphinscheduler.alert.utils.Constants; +import org.apache.dolphinscheduler.alert.utils.EnterpriseWeChatUtils; +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.entity.Alert; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * alert sender + */ +public class AlertSender{ + + private static final Logger logger = LoggerFactory.getLogger(AlertSender.class); + + private static final EmailManager emailManager= new EmailManager(); + private static final EnterpriseWeChatManager weChatManager= new EnterpriseWeChatManager(); + + + private List alertList; + private AlertDao alertDao; + + public AlertSender(){} + public AlertSender(List alertList, AlertDao alertDao){ + super(); + this.alertList = alertList; + this.alertDao = alertDao; + } + + public void run() { + + List users; + + 
Map retMaps = null; + for(Alert alert:alertList){ + users = alertDao.listUserByAlertgroupId(alert.getAlertGroupId()); + + + + // receiving group list + List receviersList = new ArrayList(); + for(User user:users){ + receviersList.add(user.getEmail()); + } + // custom receiver + String receivers = alert.getReceivers(); + if (StringUtils.isNotEmpty(receivers)){ + String[] splits = receivers.split(","); + for (String receiver : splits){ + receviersList.add(receiver); + } + } + + // copy list + List receviersCcList = new ArrayList(); + + + // Custom Copier + String receiversCc = alert.getReceiversCc(); + + if (StringUtils.isNotEmpty(receiversCc)){ + String[] splits = receiversCc.split(","); + for (String receiverCc : splits){ + receviersCcList.add(receiverCc); + } + } + + if (CollectionUtils.isEmpty(receviersList) && CollectionUtils.isEmpty(receviersCcList)) { + logger.warn("alert send error : At least one receiver address required"); + alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, "execution failure,At least one receiver address required.", alert.getId()); + continue; + } + + if (alert.getAlertType() == AlertType.EMAIL){ + retMaps = emailManager.send(receviersList,receviersCcList, alert.getTitle(), alert.getContent(),alert.getShowType()); + + alert.setInfo(retMaps); + }else if (alert.getAlertType() == AlertType.SMS){ + retMaps = emailManager.send(getReciversForSMS(users), alert.getTitle(), alert.getContent(),alert.getShowType()); + alert.setInfo(retMaps); + } + + boolean flag = Boolean.parseBoolean(String.valueOf(retMaps.get(Constants.STATUS))); + if (flag){ + alertDao.updateAlert(AlertStatus.EXECUTION_SUCCESS, "execution success", alert.getId()); + logger.info("alert send success"); + try { + String token = EnterpriseWeChatUtils.getToken(); + weChatManager.send(alert,token); + } catch (Exception e) { + logger.error(e.getMessage(),e); + } + }else { + alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE,String.valueOf(retMaps.get(Constants.MESSAGE)),alert.getId()); 
+ logger.info("alert send error : {}" , String.valueOf(retMaps.get(Constants.MESSAGE))); + } + } + + } + + + /** + * get a list of SMS users + * @param users + * @return + */ + private List getReciversForSMS(List users){ + List list = new ArrayList<>(); + for (User user : users){ + list.add(user.getPhone()); + } + return list; + } +} diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java new file mode 100644 index 0000000000..0a35d85fc4 --- /dev/null +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.alert.utils; + +/** + * constants + */ +public class Constants { + + /** + * alert properties path + */ + public static final String ALERT_PROPERTIES_PATH = "/alert.properties"; + + public static final String DATA_SOURCE_PROPERTIES_PATH = "/dao/data_source.properties__"; + + public static final String SINGLE_SLASH = "/"; + + /** + * UTF-8 + */ + public static final String UTF_8 = "UTF-8"; + + public static final String STATUS = "status"; + + public static final String MESSAGE = "message"; + + public static final String MAIL_PROTOCOL = "mail.protocol"; + + public static final String MAIL_SERVER_HOST = "mail.server.host"; + + public static final String MAIL_SERVER_PORT = "mail.server.port"; + + public static final String MAIL_SENDER = "mail.sender"; + + public static final String MAIL_USER = "mail.user"; + + public static final String MAIL_PASSWD = "mail.passwd"; + + public static final String XLS_FILE_PATH = "xls.file.path"; + + public static final String MAIL_HOST = "mail.smtp.host"; + + public static final String MAIL_PORT = "mail.smtp.port"; + + public static final String MAIL_SMTP_AUTH = "mail.smtp.auth"; + + public static final String MAIL_TRANSPORT_PROTOCOL = "mail.transport.protocol"; + + public static final String MAIL_SMTP_STARTTLS_ENABLE = "mail.smtp.starttls.enable"; + + public static final String MAIL_SMTP_SSL_ENABLE = "mail.smtp.ssl.enable"; + + public static final String MAIL_SMTP_SSL_TRUST="mail.smtp.ssl.trust"; + + public static final String TEXT_HTML_CHARSET_UTF_8 = "text/html;charset=utf-8"; + + public static final String STRING_TRUE = "true"; + + public static final String EXCEL_SUFFIX_XLS = ".xls"; + + public static final int NUMBER_1000 = 1000; + + public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name"; + + public static final String SPRING_DATASOURCE_URL = "spring.datasource.url"; + + public static final String SPRING_DATASOURCE_USERNAME = 
"spring.datasource.username"; + + public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password"; + + public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout"; + + public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize"; + + public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle"; + + public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive"; + + public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait"; + + public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = "spring.datasource.timeBetweenEvictionRunsMillis"; + + public static final String SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis"; + + public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery"; + + public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle"; + + public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow"; + + public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn"; + + public static final String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements"; + + public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit"; + + public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive"; + + public static final String SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize"; + + public static final String DEVELOPMENT = "development"; + + public static final String CLASSPATH_MAIL_TEMPLATES_ALERT_MAIL_TEMPLATE_FTL = "classpath:mail_templates/alert_mail_template.ftl"; + + public static final String TR = ""; 
+ + public static final String TD = ""; + + public static final String TD_END = ""; + + public static final String TR_END = ""; + + public static final String TITLE = "title"; + + public static final String CONTENT = "content"; + + public static final String TH = ""; + + public static final String TH_END = ""; + + public static final int ALERT_SCAN_INTERVEL = 5000; + + public static final String MARKDOWN_QUOTE = ">"; + + public static final String MARKDOWN_ENTER = "\n"; + + public static final String ENTERPRISE_WECHAT_CORP_ID = "enterprise.wechat.corp.id"; + + public static final String ENTERPRISE_WECHAT_SECRET = "enterprise.wechat.secret"; + + public static final String ENTERPRISE_WECHAT_TOKEN_URL = "enterprise.wechat.token.url"; + + public static final String ENTERPRISE_WECHAT_PUSH_URL = "enterprise.wechat.push.url"; + + public static final String ENTERPRISE_WECHAT_TEAM_SEND_MSG = "enterprise.wechat.team.send.msg"; + + public static final String ENTERPRISE_WECHAT_USER_SEND_MSG = "enterprise.wechat.user.send.msg"; + + public static final String ENTERPRISE_WECHAT_AGENT_ID = "enterprise.wechat.agent.id"; + + public static final String ENTERPRISE_WECHAT_USERS = "enterprise.wechat.users"; +} diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java new file mode 100644 index 0000000000..291ac5a82d --- /dev/null +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java @@ -0,0 +1,246 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.alert.utils;

import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.dao.entity.Alert;
import com.alibaba.fastjson.JSON;

import com.google.common.reflect.TypeToken;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.*;

/**
 * Enterprise WeChat utils: builds and pushes team/user messages and converts
 * alert content (JSON) into markdown for the WeChat message body.
 *
 * NOTE(review): the static initializers below read the message templates and
 * URLs from alert.properties via PropertyUtils; if the token URL key is absent
 * the replaceAll on a null string throws during class initialization — TODO
 * confirm the properties are always present in deployments.
 */
public class EnterpriseWeChatUtils {

    public static final Logger logger = LoggerFactory.getLogger(EnterpriseWeChatUtils.class);

    private static final String enterpriseWeChatCorpId = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_CORP_ID);

    private static final String enterpriseWeChatSecret = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_SECRET);

    private static final String enterpriseWeChatTokenUrl = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TOKEN_URL);
    // $corpId / $secret placeholders substituted once at class load time
    private static String enterpriseWeChatTokenUrlReplace = enterpriseWeChatTokenUrl
            .replaceAll("\\$corpId", enterpriseWeChatCorpId)
            .replaceAll("\\$secret", enterpriseWeChatSecret);

    private static final String enterpriseWeChatPushUrl = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_PUSH_URL);

    private static final String enterpriseWeChatTeamSendMsg = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TEAM_SEND_MSG);

    private static final String enterpriseWeChatUserSendMsg = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG);

    public static final String enterpriseWeChatAgentId = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID);

    public static final String enterpriseWeChatUsers = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS);

    /**
     * Get an Enterprise WeChat access token.
     *
     * Fix: the original leaked the CloseableHttpClient on every call; both the
     * client and the response are now closed via try-with-resources.
     *
     * @return the "access_token" field of the token endpoint's JSON response
     * @throws IOException on any transport failure
     */
    public static String getToken() throws IOException {
        String resp;

        try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
            HttpGet httpGet = new HttpGet(enterpriseWeChatTokenUrlReplace);
            try (CloseableHttpResponse response = httpClient.execute(httpGet)) {
                HttpEntity entity = response.getEntity();
                resp = EntityUtils.toString(entity, Constants.UTF_8);
                EntityUtils.consume(entity);
            }
        }

        Map<String, Object> map = JSON.parseObject(resp,
                new TypeToken<Map<String, Object>>() {
                }.getType());
        // NOTE(review): a WeChat error response has no access_token field and
        // this would NPE — TODO consider an explicit error with errmsg
        return map.get("access_token").toString();
    }

    /**
     * Build a team message for a single party.
     *
     * @param toParty target party id
     * @param agentId WeChat agent id
     * @param msg     message body
     * @return the filled-in team send-message JSON template
     */
    public static String makeTeamSendMsg(String toParty, String agentId, String msg) {
        return enterpriseWeChatTeamSendMsg.replaceAll("\\$toParty", toParty)
                .replaceAll("\\$agentId", agentId)
                .replaceAll("\\$msg", msg);
    }

    /**
     * Build a team message for multiple parties ("|"-separated per WeChat API).
     *
     * @param toParty target party ids
     * @param agentId WeChat agent id
     * @param msg     message body
     * @return the filled-in team send-message JSON template
     */
    public static String makeTeamSendMsg(Collection<String> toParty, String agentId, String msg) {
        String listParty = FuncUtils.mkString(toParty, "|");
        return enterpriseWeChatTeamSendMsg.replaceAll("\\$toParty", listParty)
                .replaceAll("\\$agentId", agentId)
                .replaceAll("\\$msg", msg);
    }

    /**
     * Build a message for a single user.
     *
     * @param toUser  target user id
     * @param agentId WeChat agent id
     * @param msg     message body
     * @return the filled-in user send-message JSON template
     */
    public static String makeUserSendMsg(String toUser, String agentId, String msg) {
        return enterpriseWeChatUserSendMsg.replaceAll("\\$toUser", toUser)
                .replaceAll("\\$agentId", agentId)
                .replaceAll("\\$msg", msg);
    }

    /**
     * Build a message for multiple users ("|"-separated per WeChat API).
     *
     * @param toUser  target user ids
     * @param agentId WeChat agent id
     * @param msg     message body
     * @return the filled-in user send-message JSON template
     */
    public static String makeUserSendMsg(Collection<String> toUser, String agentId, String msg) {
        String listUser = FuncUtils.mkString(toUser, "|");
        return enterpriseWeChatUserSendMsg.replaceAll("\\$toUser", listUser)
                .replaceAll("\\$agentId", agentId)
                .replaceAll("\\$msg", msg);
    }

    /**
     * Push a prepared message to Enterprise WeChat.
     *
     * Fix: the original leaked the CloseableHttpClient; it is now closed
     * together with the response via try-with-resources.
     *
     * @param charset payload charset
     * @param data    message JSON
     * @param token   access token from {@link #getToken()}
     * @return WeChat response, e.g. {"errcode":0,"errmsg":"ok","invaliduser":""}
     * @throws IOException on any transport failure
     */
    public static String sendEnterpriseWeChat(String charset, String data, String token) throws IOException {
        String enterpriseWeChatPushUrlReplace = enterpriseWeChatPushUrl.replaceAll("\\$token", token);

        String resp;
        try (CloseableHttpClient httpclient = HttpClients.createDefault()) {
            HttpPost httpPost = new HttpPost(enterpriseWeChatPushUrlReplace);
            httpPost.setEntity(new StringEntity(data, charset));
            try (CloseableHttpResponse response = httpclient.execute(httpPost)) {
                HttpEntity entity = response.getEntity();
                resp = EntityUtils.toString(entity, charset);
                EntityUtils.consume(entity);
            }
        }
        logger.info("Enterprise WeChat send [{}], param:{}, resp:{}", enterpriseWeChatPushUrl, data, resp);
        return resp;
    }

    /**
     * Convert table-shaped alert content (a JSON array of objects) to
     * markdown: one quoted "key:value" line per field, per row.
     *
     * @param title   alert title, rendered as inline code
     * @param content JSON array of row objects
     * @return markdown text
     */
    public static String markdownTable(String title, String content) {
        List<LinkedHashMap> mapItemsList = JSONUtils.toList(content, LinkedHashMap.class);
        StringBuilder contents = new StringBuilder(200);
        for (LinkedHashMap mapItems : mapItemsList) {

            Set<Map.Entry<String, Object>> entries = mapItems.entrySet();

            Iterator<Map.Entry<String, Object>> iterator = entries.iterator();

            StringBuilder t = new StringBuilder(String.format("`%s`%s", title, Constants.MARKDOWN_ENTER));
            while (iterator.hasNext()) {

                Map.Entry<String, Object> entry = iterator.next();
                t.append(Constants.MARKDOWN_QUOTE);
                t.append(entry.getKey()).append(":").append(entry.getValue());
                t.append(Constants.MARKDOWN_ENTER);
            }

            contents.append(t);
        }
        return contents.toString();
    }

    /**
     * Convert text-shaped alert content (a JSON array of strings) to
     * markdown: one quoted line per entry.
     *
     * @param title   alert title, rendered as inline code
     * @param content JSON array of strings
     * @return markdown text, or null when content is empty or not valid JSON
     */
    public static String markdownText(String title, String content) {
        if (StringUtils.isNotEmpty(content)) {
            List<String> list;
            try {
                list = JSONUtils.toList(content, String.class);
            } catch (Exception e) {
                logger.error("json format exception", e);
                return null;
            }

            StringBuilder contents = new StringBuilder(100);
            contents.append(String.format("`%s`\n", title));
            for (String str : list) {
                contents.append(Constants.MARKDOWN_QUOTE);
                contents.append(str);
                contents.append(Constants.MARKDOWN_ENTER);
            }

            return contents.toString();

        }
        return null;
    }

    /**
     * Determine the markdown style based on the show type of the alert.
     *
     * @param alert the alert to render
     * @return markdown text; empty string for show types other than TABLE/TEXT
     */
    public static String markdownByAlert(Alert alert) {
        String result = "";
        if (alert.getShowType() == ShowType.TABLE) {
            result = markdownTable(alert.getTitle(), alert.getContent());
        } else if (alert.getShowType() == ShowType.TEXT) {
            result = markdownText(alert.getTitle(), alert.getContent());
        }
        return result;

    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.alert.utils;

import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.FileOutputStream;
import java.util.*;

/**
 * Excel utils: turns table-shaped alert content into an .xls attachment.
 */
public class ExcelUtils {

    private static final Logger logger = LoggerFactory.getLogger(ExcelUtils.class);

    /**
     * Generate an .xls file named {@code title}.xls under {@code xlsFilePath}.
     * The header row is taken from the keys of the first JSON object; each
     * object becomes one body row (values stringified in iteration order).
     *
     * @param content     JSON array of row objects
     * @param title       file base name and sheet title
     * @param xlsFilePath output directory
     * @throws RuntimeException when the JSON is malformed/empty or writing fails
     */
    public static void genExcelFile(String content, String title, String xlsFilePath) {
        List<LinkedHashMap> itemsList;
        try {
            itemsList = JSONUtils.toList(content, LinkedHashMap.class);
        } catch (Exception e) {
            logger.error(String.format("json format incorrect : %s", content), e);
            throw new RuntimeException("json format incorrect", e);
        }

        // JSONUtils.toList also returns null on parse failure, so check both
        if (itemsList == null || itemsList.size() == 0) {
            logger.error("itemsList is null");
            throw new RuntimeException("itemsList is null");
        }

        // header columns come from the first row's keys, preserving order
        LinkedHashMap headerMap = itemsList.get(0);

        List<String> headerList = new ArrayList<>();

        Iterator<Map.Entry<String, Object>> iter = headerMap.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry<String, Object> en = iter.next();
            headerList.add(en.getKey());
        }

        // try-with-resources replaces the original manual close-in-finally;
        // the output stream is only opened once the sheet is fully built,
        // matching the original's "no file on build failure" behavior
        try (HSSFWorkbook wb = new HSSFWorkbook()) {
            // generate a sheet
            HSSFSheet sheet = wb.createSheet();
            HSSFRow row = sheet.createRow(0);
            // set the height of the header line
            row.setHeight((short) 500);

            // excel headers
            for (int i = 0; i < headerList.size(); i++) {
                HSSFCell cell = row.createCell(i);
                cell.setCellValue(headerList.get(i));
            }

            // excel body
            int rowIndex = 1;
            for (LinkedHashMap itemsMap : itemsList) {
                Object[] values = itemsMap.values().toArray();
                row = sheet.createRow(rowIndex);
                // body row height
                row.setHeight((short) 500);
                rowIndex++;
                for (int j = 0; j < values.length; j++) {
                    HSSFCell cell1 = row.createCell(j);
                    cell1.setCellValue(String.valueOf(values[j]));
                }
            }

            // crude column sizing proportional to the header text length
            for (int i = 0; i < headerList.size(); i++) {
                sheet.setColumnWidth(i, headerList.get(i).length() * 800);
            }

            try (FileOutputStream fos = new FileOutputStream(
                    xlsFilePath + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS)) {
                wb.write(fos);
            }

        } catch (Exception e) {
            logger.error("generate excel error", e);
            throw new RuntimeException("generate excel error", e);
        }
    }

}
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.alert.utils; + +public class FuncUtils { + + static public String mkString(Iterable list, String split) { + StringBuilder sb = new StringBuilder(); + boolean first = true; + for (String item : list) { + if (first) { + first = false; + } else { + sb.append(split); + } + sb.append(item); + } + return sb.toString(); + } + +} diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java new file mode 100644 index 0000000000..cab85c49e6 --- /dev/null +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.alert.utils;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;

/**
 * Thin JSON helpers over fastjson used by the alert module.
 */
public class JSONUtils {

    private static final Logger logger = LoggerFactory.getLogger(JSONUtils.class);

    /**
     * Serialize an object to a JSON string (non-pretty).
     *
     * @param object object to serialize
     * @return JSON string
     * @throws RuntimeException wrapping any fastjson failure
     */
    public static String toJsonString(Object object) {
        try {
            return JSONObject.toJSONString(object, false);
        } catch (Exception e) {
            // message fixed: this path serializes, it does not deserialize
            throw new RuntimeException("Json serialization exception.", e);
        }
    }

    /**
     * Deserialize a JSON array into a typed list.
     * Type parameter restored so callers get {@code List<T>} instead of a raw
     * list; existing call sites are unaffected.
     *
     * @param json  JSON array text
     * @param clazz element class
     * @param <T>   element type
     * @return the parsed list, or null when {@code json} is empty or invalid
     *         (callers such as ExcelUtils rely on the null return)
     */
    public static <T> List<T> toList(String json, Class<T> clazz) {
        if (StringUtils.isEmpty(json)) {
            return null;
        }
        try {
            return JSONArray.parseArray(json, clazz);
        } catch (Exception e) {
            logger.error("JSONArray.parseArray exception!", e);
        }

        return null;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.alert.utils;

import org.apache.dolphinscheduler.common.enums.ShowType;
import freemarker.cache.StringTemplateLoader;
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.mail.EmailException;
import org.apache.commons.mail.HtmlEmail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ResourceUtils;

import javax.mail.*;
import javax.mail.internet.*;
import java.io.*;
import java.util.*;

import static org.apache.dolphinscheduler.alert.utils.PropertyUtils.getInt;


/**
 * Mail utils: renders alert content (table / text / attachment) through the
 * freemarker mail template and sends it over the configured SMTP server.
 */
public class MailUtils {

    public static final Logger logger = LoggerFactory.getLogger(MailUtils.class);

    // SMTP configuration, read once from alert.properties at class load
    public static final String mailProtocol = PropertyUtils.getString(Constants.MAIL_PROTOCOL);

    public static final String mailServerHost = PropertyUtils.getString(Constants.MAIL_SERVER_HOST);

    public static final Integer mailServerPort = PropertyUtils.getInt(Constants.MAIL_SERVER_PORT);

    public static final String mailSender = PropertyUtils.getString(Constants.MAIL_SENDER);

    public static final String mailUser = PropertyUtils.getString(Constants.MAIL_USER);

    public static final String mailPasswd = PropertyUtils.getString(Constants.MAIL_PASSWD);

    public static final Boolean mailUseStartTLS = PropertyUtils.getBoolean(Constants.MAIL_SMTP_STARTTLS_ENABLE);

    public static final Boolean mailUseSSL = PropertyUtils.getBoolean(Constants.MAIL_SMTP_SSL_ENABLE);

    /** directory where generated .xls attachments are written */
    public static final String xlsFilePath = PropertyUtils.getString(Constants.XLS_FILE_PATH);

    // string views of the TLS/SSL flags, passed straight into the mail Session
    public static final String starttlsEnable = PropertyUtils.getString(Constants.MAIL_SMTP_STARTTLS_ENABLE);

    public static final String sslEnable = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_ENABLE);

    public static final String sslTrust = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_TRUST);

    /** freemarker template for HTML mail bodies; null when loading failed */
    private static Template MAIL_TEMPLATE;

    static {
        Configuration cfg = new Configuration(Configuration.VERSION_2_3_21);
        cfg.setDefaultEncoding(Constants.UTF_8);
        StringTemplateLoader stringTemplateLoader = new StringTemplateLoader();
        cfg.setTemplateLoader(stringTemplateLoader);
        InputStreamReader isr = null;
        try {
            isr = new InputStreamReader(new FileInputStream(ResourceUtils.getFile(Constants.CLASSPATH_MAIL_TEMPLATES_ALERT_MAIL_TEMPLATE_FTL)),
                    Constants.UTF_8);

            MAIL_TEMPLATE = new Template("alert_mail_template", isr, cfg);
        } catch (Exception e) {
            // fix: the original swallowed this silently; log it so a missing
            // template is diagnosable (getTemplateContent would NPE later)
            logger.error("load mail template error", e);
            MAIL_TEMPLATE = null;
        } finally {
            IOUtils.closeQuietly(isr);
        }
    }


    /**
     * Send mail to receivers (no cc).
     *
     * @param receivers receiver addresses
     * @param title     mail subject
     * @param content   alert content (JSON)
     * @param showType  rendering mode
     * @return map with "status" (Boolean) and, on failure, "message"
     */
    public static Map<String, Object> sendMails(Collection<String> receivers, String title, String content, ShowType showType) {
        return sendMails(receivers, null, title, content, showType);
    }

    /**
     * Send mail.
     *
     * @param receivers   receiver addresses (NOTE: empty entries are removed
     *                    in place — the caller's collection is mutated)
     * @param receiversCc cc addresses, may be null/empty
     * @param title       mail subject
     * @param content     alert content (JSON)
     * @param showType    rendering mode (inline table/text or attachment)
     * @return map with "status" (Boolean) and, on failure, "message"
     */
    public static Map<String, Object> sendMails(Collection<String> receivers, Collection<String> receiversCc, String title, String content, ShowType showType) {
        Map<String, Object> retMap = new HashMap<>();
        retMap.put(Constants.STATUS, false);

        // if there are no receivers and no cc receivers, nothing to do
        if (CollectionUtils.isEmpty(receivers) && CollectionUtils.isEmpty(receiversCc)) {
            return retMap;
        }

        receivers.removeIf(StringUtils::isEmpty);

        if (showType == ShowType.TABLE || showType == ShowType.TEXT) {
            // inline HTML mail
            HtmlEmail email = new HtmlEmail();

            try {
                Session session = getSession();
                email.setMailSession(session);
                email.setFrom(mailSender);
                email.setCharset(Constants.UTF_8);
                if (CollectionUtils.isNotEmpty(receivers)) {
                    // receivers mail
                    for (String receiver : receivers) {
                        email.addTo(receiver);
                    }
                }

                if (CollectionUtils.isNotEmpty(receiversCc)) {
                    // cc
                    for (String receiverCc : receiversCc) {
                        email.addCc(receiverCc);
                    }
                }
                // render body and send
                return getStringObjectMap(title, content, showType, retMap, email);
            } catch (Exception e) {
                handleException(receivers, retMap, e);
            }
        } else if (showType == ShowType.ATTACHMENT || showType == ShowType.TABLEATTACHMENT) {
            try {

                String partContent = (showType == ShowType.ATTACHMENT ? "Please see the attachment " + title + Constants.EXCEL_SUFFIX_XLS : htmlTable(content, false));

                attachment(receivers, receiversCc, title, content, partContent);

                retMap.put(Constants.STATUS, true);
                return retMap;
            } catch (Exception e) {
                handleException(receivers, retMap, e);
                return retMap;
            }
        }
        return retMap;

    }

    /**
     * Render table-shaped content as HTML rows for the mail template.
     *
     * @param content JSON array of row objects
     * @param showAll when false, rows are capped at {@link Constants#NUMBER_1000}
     * @return rendered template, or null when content is empty
     */
    private static String htmlTable(String content, boolean showAll) {
        if (StringUtils.isNotEmpty(content)) {
            // NOTE(review): toList returns null on malformed JSON, which would
            // NPE below — callers currently guarantee well-formed content
            List<LinkedHashMap> mapItemsList = JSONUtils.toList(content, LinkedHashMap.class);

            if (!showAll && mapItemsList.size() > Constants.NUMBER_1000) {
                mapItemsList = mapItemsList.subList(0, Constants.NUMBER_1000);
            }

            StringBuilder contents = new StringBuilder(200);

            boolean flag = true;

            String title = "";
            for (LinkedHashMap mapItems : mapItemsList) {

                Set<Map.Entry<String, Object>> entries = mapItems.entrySet();

                Iterator<Map.Entry<String, Object>> iterator = entries.iterator();

                // t builds the <th> header row, cs the <td> data row
                StringBuilder t = new StringBuilder(Constants.TR);
                StringBuilder cs = new StringBuilder(Constants.TR);
                while (iterator.hasNext()) {

                    Map.Entry<String, Object> entry = iterator.next();
                    t.append(Constants.TH).append(entry.getKey()).append(Constants.TH_END);
                    cs.append(Constants.TD).append(String.valueOf(entry.getValue())).append(Constants.TD_END);

                }
                t.append(Constants.TR_END);
                cs.append(Constants.TR_END);
                // only the first row contributes the header
                if (flag) {
                    title = t.toString();
                }
                flag = false;
                contents.append(cs);
            }

            return getTemplateContent(title, contents.toString());
        }

        return null;
    }

    /**
     * Render table-shaped content without a row cap.
     *
     * @param content JSON array of row objects
     * @return rendered template, or null when content is empty
     */
    private static String htmlTable(String content) {
        return htmlTable(content, true);
    }

    /**
     * Render text-shaped content (a JSON array of strings) as one-cell rows.
     *
     * @param content JSON array of strings
     * @return rendered template, or null when content is empty or invalid
     */
    private static String htmlText(String content) {

        if (StringUtils.isNotEmpty(content)) {
            List<String> list;
            try {
                list = JSONUtils.toList(content, String.class);
            } catch (Exception e) {
                logger.error("json format exception", e);
                return null;
            }

            StringBuilder contents = new StringBuilder(100);
            for (String str : list) {
                contents.append(Constants.TR);
                contents.append(Constants.TD).append(str).append(Constants.TD_END);
                contents.append(Constants.TR_END);
            }

            return getTemplateContent(null, contents.toString());

        }

        return null;
    }


    /**
     * Send mail with the content attached as an Excel file.
     *
     * @param receivers   receiver addresses
     * @param receiversCc cc addresses, may be null/empty
     * @param title       mail subject and attachment base name
     * @param content     JSON content turned into the .xls
     * @param partContent HTML body accompanying the attachment
     * @throws Exception on any build or transport failure
     */
    private static void attachment(Collection<String> receivers, Collection<String> receiversCc, String title, String content, String partContent) throws Exception {
        MimeMessage msg = getMimeMessage(receivers);

        attachContent(receiversCc, title, content, partContent, msg);
    }

    /**
     * Build a MimeMessage with sender and TO receivers set.
     *
     * @param receivers receiver addresses
     * @return the prepared message
     * @throws MessagingException on invalid addresses or session failure
     */
    private static MimeMessage getMimeMessage(Collection<String> receivers) throws MessagingException {
        // 1. create the mail session
        Session session = getSession();
        // debug mode off
        session.setDebug(false);

        // 2. create the message
        MimeMessage msg = new MimeMessage(session);
        // 3. set sender
        msg.setFrom(new InternetAddress(mailSender));
        // 4. set receivers
        for (String receiver : receivers) {
            msg.addRecipients(MimeMessage.RecipientType.TO, InternetAddress.parse(receiver));
        }
        return msg;
    }

    /**
     * Build an authenticated SMTP session from the configured properties.
     *
     * @return mail session
     */
    private static Session getSession() {
        Properties props = new Properties();
        props.setProperty(Constants.MAIL_HOST, mailServerHost);
        props.setProperty(Constants.MAIL_PORT, String.valueOf(mailServerPort));
        props.setProperty(Constants.MAIL_SMTP_AUTH, Constants.STRING_TRUE);
        props.setProperty(Constants.MAIL_TRANSPORT_PROTOCOL, mailProtocol);
        props.setProperty(Constants.MAIL_SMTP_STARTTLS_ENABLE, starttlsEnable);
        props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, sslEnable);
        props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, sslTrust);

        Authenticator auth = new Authenticator() {
            @Override
            protected PasswordAuthentication getPasswordAuthentication() {
                // mail username and password
                return new PasswordAuthentication(mailUser, mailPasswd);
            }
        };

        return Session.getInstance(props, auth);
    }

    /**
     * Attach the generated Excel file plus an HTML body part, send, then
     * delete the temporary file.
     *
     * @param receiversCc cc addresses, may be null/empty
     * @param title       subject and attachment base name
     * @param content     JSON content for the .xls
     * @param partContent HTML body part
     * @param msg         message prepared by {@link #getMimeMessage}
     * @throws MessagingException on build/transport failure
     * @throws IOException        on attachment I/O failure
     */
    private static void attachContent(Collection<String> receiversCc, String title, String content, String partContent, MimeMessage msg) throws MessagingException, IOException {
        // set cc receivers, if any
        if (CollectionUtils.isNotEmpty(receiversCc)) {
            for (String receiverCc : receiversCc) {
                msg.addRecipients(MimeMessage.RecipientType.CC, InternetAddress.parse(receiverCc));
            }
        }

        // set subject
        msg.setSubject(title);
        MimeMultipart partList = new MimeMultipart();
        // HTML body part
        MimeBodyPart part1 = new MimeBodyPart();
        part1.setContent(partContent, Constants.TEXT_HTML_CHARSET_UTF_8);
        // attachment part: generate the excel file first
        MimeBodyPart part2 = new MimeBodyPart();
        ExcelUtils.genExcelFile(content, title, xlsFilePath);
        File file = new File(xlsFilePath + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS);
        part2.attachFile(file);
        part2.setFileName(MimeUtility.encodeText(title + Constants.EXCEL_SUFFIX_XLS));
        // assemble and send
        partList.addBodyPart(part1);
        partList.addBodyPart(part2);
        msg.setContent(partList);
        Transport.send(msg);
        // remove the temporary excel file
        deleteFile(file);
    }

    /**
     * Render the body for TABLE/TEXT mode, send the mail and mark success.
     *
     * @param title    mail subject
     * @param content  alert content (JSON)
     * @param showType TABLE or TEXT
     * @param retMap   result map, mutated with "status"=true on success
     * @param email    prepared HtmlEmail
     * @return retMap
     * @throws EmailException on send failure
     */
    private static Map<String, Object> getStringObjectMap(String title, String content, ShowType showType, Map<String, Object> retMap, HtmlEmail email) throws EmailException {

        // the subject of the message to be sent
        email.setSubject(title);
        // HtmlEmail allows HTML tags in the message body
        if (showType == ShowType.TABLE) {
            email.setMsg(htmlTable(content));
        } else if (showType == ShowType.TEXT) {
            email.setMsg(htmlText(content));
        }

        // send
        email.send();

        retMap.put(Constants.STATUS, true);

        return retMap;
    }

    /**
     * Delete a temporary file, logging the outcome.
     *
     * @param file file to delete
     */
    public static void deleteFile(File file) {
        if (file.exists()) {
            if (file.delete()) {
                logger.info("delete success:{}{}", file.getAbsolutePath(), file.getName());
            } else {
                logger.info("delete fail{}{}", file.getAbsolutePath(), file.getName());
            }
        } else {
            logger.info("file not exists:{}{}", file.getAbsolutePath(), file.getName());
        }
    }


    /**
     * Record a send failure in the result map.
     *
     * Fix: the original called StringUtils.join(",", receivers), which binds to
     * the varargs overload and joins the separator string WITH the collection
     * instead of joining the receivers BY the separator. Arguments reordered to
     * the (Iterable, separator) overload.
     *
     * @param receivers intended receivers, listed in the error message
     * @param retMap    result map, mutated with "message"
     * @param e         the failure
     */
    private static void handleException(Collection<String> receivers, Map<String, Object> retMap, Exception e) {
        logger.error("Send email to {} failed", StringUtils.join(receivers, ","), e);
        retMap.put(Constants.MESSAGE, "Send email to {" + StringUtils.join(receivers, ",") + "} failed," + e.toString());
    }

    /**
     * Run the freemarker mail template with the given title/content.
     *
     * @param title   optional template "title" value
     * @param content template "content" value
     * @return rendered template, or null when processing fails
     */
    private static String getTemplateContent(String title, String content) {
        StringWriter out = new StringWriter();
        Map<String, Object> map = new HashMap<>();
        if (null != title) {
            map.put(Constants.TITLE, title);
        }
        map.put(Constants.CONTENT, content);
        try {
            MAIL_TEMPLATE.process(map, out);
            return out.toString();
        } catch (TemplateException | IOException e) {
            logger.error(e.getMessage(), e);
        }

        return null;
    }
}
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.alert.utils; + +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + +import static org.apache.dolphinscheduler.alert.utils.Constants.ALERT_PROPERTIES_PATH; +import static org.apache.dolphinscheduler.alert.utils.Constants.DATA_SOURCE_PROPERTIES_PATH; + +/** + * property utils + * single instance + */ +public class PropertyUtils { + + /** + * logger + */ + private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class); + + private static final Properties properties = new Properties(); + + private static final PropertyUtils propertyUtils = new PropertyUtils(); + + private PropertyUtils(){ + init(); + } + + private void init(){ + String[] propertyFiles = new String[]{ALERT_PROPERTIES_PATH,DATA_SOURCE_PROPERTIES_PATH}; + for (String fileName : propertyFiles) { + InputStream fis = null; + try { + fis = PropertyUtils.class.getResourceAsStream(fileName); + properties.load(fis); + + } catch (IOException e) { + logger.error(e.getMessage(), e); + System.exit(1); + } finally { + IOUtils.closeQuietly(fis); + } + } + } + +/* + public static PropertyUtils getInstance(){ + return propertyUtils; + } +*/ + + /** + * get property value + * + * @param key property name + * @return + */ + public static String getString(String key) { + return properties.getProperty(key); + } + + /** + * get property value + * + * @param key property name + * @return get property int value , if key == null, then return -1 + */ + public static int getInt(String key) { + return getInt(key, -1); + } + + /** + * + * @param key + * @param defaultValue + * @return + */ + public static int getInt(String key, int defaultValue) { + String value = getString(key); + if (value == null) { + return defaultValue; + } + 
+ try { + return Integer.parseInt(value); + } catch (NumberFormatException e) { + logger.info(e.getMessage(),e); + } + return defaultValue; + } + + /** + * get property value + * + * @param key property name + * @return + */ + public static Boolean getBoolean(String key) { + String value = properties.getProperty(key.trim()); + if(null != value){ + return Boolean.parseBoolean(value); + } + + return null; + } + + /** + * + * @param key + * @return + */ + public static long getLong(String key) { + return getLong(key,-1); + } + + /** + * + * @param key + * @param defaultVal + * @return + */ + public static long getLong(String key, long defaultVal) { + String val = getString(key); + return val == null ? defaultVal : Long.parseLong(val); + } + + + /** + * + * @param key + * @param defaultVal + * @return + */ + public double getDouble(String key, double defaultVal) { + String val = getString(key); + return val == null ? defaultVal : Double.parseDouble(val); + } + + + /** + * get array + * @param key property name + * @param splitStr separator + * @return + */ + public static String[] getArray(String key, String splitStr) { + String value = getString(key); + if (value == null) { + return null; + } + try { + String[] propertyArray = value.split(splitStr); + return propertyArray; + } catch (NumberFormatException e) { + logger.info(e.getMessage(),e); + } + return null; + } + + /** + * + * @param key + * @param type + * @param defaultValue + * @param + * @return get enum value + */ + public > T getEnum(String key, Class type, + T defaultValue) { + String val = getString(key); + return val == null ? 
defaultValue : Enum.valueOf(type, val); + } +} diff --git a/escheduler-alert/src/main/resources/alert.properties b/dolphinscheduler-alert/src/main/resources/alert.properties similarity index 100% rename from escheduler-alert/src/main/resources/alert.properties rename to dolphinscheduler-alert/src/main/resources/alert.properties diff --git a/dolphinscheduler-alert/src/main/resources/alert_logback.xml b/dolphinscheduler-alert/src/main/resources/alert_logback.xml new file mode 100644 index 0000000000..2b617817e0 --- /dev/null +++ b/dolphinscheduler-alert/src/main/resources/alert_logback.xml @@ -0,0 +1,31 @@ + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + ${log.base}/dolphinscheduler-alert.log + + ${log.base}/dolphinscheduler-alert.%d{yyyy-MM-dd_HH}.%i.log + 20 + 64MB + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + + \ No newline at end of file diff --git a/escheduler-alert/src/main/resources/application_alert.properties b/dolphinscheduler-alert/src/main/resources/application_alert.properties similarity index 100% rename from escheduler-alert/src/main/resources/application_alert.properties rename to dolphinscheduler-alert/src/main/resources/application_alert.properties diff --git a/escheduler-alert/src/main/resources/mail_templates/alert_mail_template.ftl b/dolphinscheduler-alert/src/main/resources/mail_templates/alert_mail_template.ftl similarity index 100% rename from escheduler-alert/src/main/resources/mail_templates/alert_mail_template.ftl rename to dolphinscheduler-alert/src/main/resources/mail_templates/alert_mail_template.ftl diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java new file mode 100644 index 0000000000..527d828e40 --- /dev/null +++ 
b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java @@ -0,0 +1,119 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.alert.utils; + +import com.alibaba.fastjson.JSON; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +/** + * Please manually modify the configuration file before testing. 
+ * file: alert.properties + * enterprise.wechat.corp.id + * enterprise.wechat.secret + * enterprise.wechat.token.url + * enterprise.wechat.push.url + * enterprise.wechat.send.msg + * enterprise.wechat.agent.id + * enterprise.wechat.users + */ +@Ignore +public class EnterpriseWeChatUtilsTest { + + private String agentId = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID); // app id + private Collection listUserId = Arrays.asList(PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS).split(",")); + + // Please change + private String partyId = "2"; + private Collection listPartyId = Arrays.asList("2","4"); + @Test + public void testSendSingleTeamWeChat() { + try { + String token = EnterpriseWeChatUtils.getToken(); + String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world"); + String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); + + String errmsg = JSON.parseObject(resp).getString("errmsg"); + Assert.assertEquals(errmsg, "ok"); + } catch (IOException e) { + e.printStackTrace(); + } + } + + @Test + public void testSendMultiTeamWeChat() { + + try { + String token = EnterpriseWeChatUtils.getToken(); + String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world"); + String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); + + String errmsg = JSON.parseObject(resp).getString("errmsg"); + Assert.assertEquals(errmsg, "ok"); + } catch (IOException e) { + e.printStackTrace(); + } + } + + @Test + public void testSendSingleUserWeChat() { + try { + String token = EnterpriseWeChatUtils.getToken(); + String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId.stream().findFirst().get(), agentId, "您的会议室已经预定,稍后会同步到`邮箱` \n" + + ">**事项详情** \n" + + ">事 项:开会
" + + ">组织者:@miglioguan \n" + + ">参与者:@miglioguan、@kunliu、@jamdeezhou、@kanexiong、@kisonwang \n" + + "> \n" + + ">会议室:广州TIT 1楼 301 \n" + + ">日 期:2018年5月18日 \n" + + ">时 间:上午9:00-11:00 \n" + + "> \n" + + ">请准时参加会议。 \n" + + "> \n" + + ">如需修改会议信息,请点击:[修改会议信息](https://work.weixin.qq.com)\""); + + String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); + + String errmsg = JSON.parseObject(resp).getString("errmsg"); + Assert.assertEquals(errmsg, "ok"); + } catch (IOException e) { + e.printStackTrace(); + } + } + + @Test + public void testSendMultiUserWeChat() { + try { + String token = EnterpriseWeChatUtils.getToken(); + + String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world"); + String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); + + String errmsg = JSON.parseObject(resp).getString("errmsg"); + Assert.assertEquals(errmsg, "ok"); + } catch (IOException e) { + e.printStackTrace(); + } + } + +} diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/MailUtilsTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/MailUtilsTest.java new file mode 100644 index 0000000000..7525256f9b --- /dev/null +++ b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/MailUtilsTest.java @@ -0,0 +1,231 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.alert.utils; + + +import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.common.enums.ShowType; +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.entity.Alert; +import org.apache.dolphinscheduler.dao.entity.User; +import freemarker.cache.StringTemplateLoader; +import freemarker.template.Configuration; +import freemarker.template.Template; +import freemarker.template.TemplateException; +import org.apache.commons.io.IOUtils; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.util.ResourceUtils; + +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.StringWriter; +import java.util.*; + + +/** + */ +@Ignore +public class MailUtilsTest { + private static final Logger logger = LoggerFactory.getLogger(MailUtilsTest.class); + @Test + public void testSendMails() { + String[] receivers = new String[]{"xx@xx.com"}; + String[] receiversCc = new String[]{"xxx@xxx.com"}; + + String content ="[\"id:69\"," + + "\"name:UserBehavior-0--1193959466\"," + + "\"Job name: 启动工作流\"," + + "\"State: SUCCESS\"," + + "\"Recovery:NO\"," + + "\"Run time: 1\"," + + "\"Start time: 2018-08-06 10:31:34.0\"," + + "\"End time: 2018-08-06 10:31:49.0\"," + + "\"Host: 192.168.xx.xx\"," + + "\"Notify group :4\"]"; + + Alert alert = new Alert(); + alert.setTitle("Mysql异常"); + 
alert.setShowType(ShowType.TEXT); + alert.setContent(content); + alert.setAlertType(AlertType.EMAIL); + alert.setAlertGroupId(4); + + MailUtils.sendMails(Arrays.asList(receivers),Arrays.asList(receiversCc),alert.getTitle(),alert.getContent(), ShowType.TEXT); + } + + + @Test + public void testQuery(){ + AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); + List alerts = alertDao.listWaitExecutionAlert(); + + String[] mails = new String[]{"xx@xx.com"}; + + for(Alert alert : alerts){ + MailUtils.sendMails(Arrays.asList(mails),"gaojing", alert.getContent(), alert.getShowType()); + } + + } + + public String list2String(){ + + LinkedHashMap map1 = new LinkedHashMap<>(); + map1.put("mysql服务名称","mysql200"); + map1.put("mysql地址","192.168.xx.xx"); + map1.put("端口","3306"); + map1.put("期间内没有使用索引的查询数握","80"); + map1.put("数据库客户端连接数","190"); + + LinkedHashMap map2 = new LinkedHashMap<>(); + map2.put("mysql服务名称","mysql210"); + map2.put("mysql地址","192.168.xx.xx"); + map2.put("端口","3306"); + map2.put("期间内没有使用索引的查询数握","10"); + map2.put("数据库客户端连接数","90"); + + List> maps = new ArrayList<>(); + maps.add(0,map1); + maps.add(1,map2); + String mapjson = JSONUtils.toJsonString(maps); + logger.info(mapjson); + + return mapjson; + + } + + @Test + public void testSendTableMail(){ + String[] mails = new String[]{"xx@xx.com"}; + Alert alert = new Alert(); + alert.setTitle("Mysql Exception"); + alert.setShowType(ShowType.TABLE); + String content= list2String(); + alert.setContent(content); + alert.setAlertType(AlertType.EMAIL); + alert.setAlertGroupId(1); + MailUtils.sendMails(Arrays.asList(mails),"gaojing", alert.getContent(), ShowType.TABLE); + } + + /** + * Used to test add alarm information, mail sent + * Text + */ + @Test + public void addAlertText(){ + AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); + Alert alert = new Alert(); + alert.setTitle("Mysql Exception"); + alert.setShowType(ShowType.TEXT); + alert.setContent("[\"告警时间:2018-02-05\", \"服务名:MYSQL_ALTER\", 
\"告警名:MYSQL_ALTER_DUMP\", \"获取告警异常!,接口报错,异常信息:timed out\", \"请求地址:http://blog.csdn.net/dreamInTheWorld/article/details/78539286\"]"); + alert.setAlertType(AlertType.EMAIL); + alert.setAlertGroupId(1); + alertDao.addAlert(alert); + } + + + /** + * Used to test add alarm information, mail sent + * Table + */ + @Test + public void addAlertTable(){ + AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); + Alert alert = new Alert(); + alert.setTitle("Mysql Exception"); + alert.setShowType(ShowType.TABLE); + + String content = list2String(); + alert.setContent(content); + alert.setAlertType(AlertType.EMAIL); + alert.setAlertGroupId(1); + alertDao.addAlert(alert); + } + + @Test + public void testAlertDao(){ + AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); + List users = alertDao.listUserByAlertgroupId(3); + logger.info(users.toString()); + } + + @Test + public void testAttachmentFile()throws Exception{ + String[] mails = new String[]{"xx@xx.com"}; + Alert alert = new Alert(); + alert.setTitle("Mysql Exception"); + alert.setShowType(ShowType.ATTACHMENT); + String content = list2String(); + alert.setContent(content); + alert.setAlertType(AlertType.EMAIL); + alert.setAlertGroupId(1); + MailUtils.sendMails(Arrays.asList(mails),"gaojing",alert.getContent(),ShowType.ATTACHMENT); + } + + @Test + public void testTableAttachmentFile()throws Exception{ + String[] mails = new String[]{"xx@xx.com"}; + Alert alert = new Alert(); + alert.setTitle("Mysql Exception"); + alert.setShowType(ShowType.TABLEATTACHMENT); + String content = list2String(); + alert.setContent(content); + alert.setAlertType(AlertType.EMAIL); + alert.setAlertGroupId(1); + MailUtils.sendMails(Arrays.asList(mails),"gaojing",alert.getContent(),ShowType.TABLEATTACHMENT); + } + + @Test + public void template(){ + Template MAIL_TEMPLATE; + Configuration cfg = new Configuration(Configuration.VERSION_2_3_21); + cfg.setDefaultEncoding(Constants.UTF_8); + StringTemplateLoader stringTemplateLoader = 
new StringTemplateLoader(); + cfg.setTemplateLoader(stringTemplateLoader); + InputStreamReader isr = null; + try { + isr = new InputStreamReader(new FileInputStream(ResourceUtils.getFile(Constants.CLASSPATH_MAIL_TEMPLATES_ALERT_MAIL_TEMPLATE_FTL)), + Constants.UTF_8); + + MAIL_TEMPLATE = new Template("alert_mail_template", isr, cfg); + } catch (Exception e) { + MAIL_TEMPLATE = null; + } finally { + IOUtils.closeQuietly(isr); + } + + + StringWriter out = new StringWriter(); + Map map = new HashMap<>(); + map.put(Constants.TITLE,"title_test"); + try { + MAIL_TEMPLATE.process(map, out); + logger.info(out.toString()); + + } catch (TemplateException e) { + logger.error(e.getMessage(),e); + } catch (IOException e) { + logger.error(e.getMessage(),e); + } + + } + +} diff --git a/dolphinscheduler-api/pom.xml b/dolphinscheduler-api/pom.xml new file mode 100644 index 0000000000..2dd599c579 --- /dev/null +++ b/dolphinscheduler-api/pom.xml @@ -0,0 +1,219 @@ + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler + 1.1.0-SNAPSHOT + + dolphinscheduler-api + jar + + + + org.apache.dolphinscheduler + dolphinscheduler-alert + + + org.apache.dolphinscheduler + dolphinscheduler-server + + + io.netty + netty + + + io.netty + netty-all + + + com.google + netty + + + leveldbjni-all + org.fusesource.leveldbjni + + + protobuf-java + com.google.protobuf + + + + + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-tomcat + + + log4j-to-slf4j + org.apache.logging.log4j + + + + + + + org.springframework.boot + spring-boot-starter-jetty + + + org.eclipse.jetty.websocket + javax-websocket-server-impl + + + org.eclipse.jetty.websocket + websocket-server + + + + + + org.springframework.boot + spring-boot-starter-test + test + + + org.ow2.asm + asm + + + org.springframework.boot + spring-boot + + + org.springframework.boot + spring-boot-autoconfigure + + + + + org.springframework.boot + spring-boot-starter-aop + + + 
org.springframework.boot + spring-boot-starter + + + + + org.springframework + spring-context + + + + org.apache.httpcomponents + httpcore + + + org.apache.httpcomponents + httpclient + + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.core + jackson-core + + + + com.alibaba + fastjson + + + + commons-collections + commons-collections + + + + org.quartz-scheduler + quartz + + + c3p0 + c3p0 + + + + + + org.quartz-scheduler + quartz-jobs + + + + io.springfox + springfox-swagger2 + 2.9.2 + + + + io.springfox + springfox-swagger-ui + 2.9.2 + + + + com.github.xiaoymin + swagger-bootstrap-ui + 1.9.3 + + + + org.apache.dolphinscheduler + dolphinscheduler-rpc + + + + junit + junit + 4.12 + test + + + + + + maven-assembly-plugin + 2.6 + + + src/main/assembly/package.xml + + false + + + + make-assembly + package + + single + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${java.version} + ${java.version} + ${project.build.sourceEncoding} + + + + + \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/assembly/package.xml b/dolphinscheduler-api/src/main/assembly/package.xml new file mode 100644 index 0000000000..1c24d64b03 --- /dev/null +++ b/dolphinscheduler-api/src/main/assembly/package.xml @@ -0,0 +1,74 @@ + + cluster + + dir + + false + + + src/main/resources + + **/*.properties + **/*.xml + **/*.json + + conf + + + ${project.parent.basedir}/dolphinscheduler-common/src/main/resources + + **/*.properties + **/*.xml + **/*.json + + conf + + + ${project.parent.basedir}/dolphinscheduler-common/src/main/resources/bin + + *.* + + 755 + bin + + + ${project.parent.basedir}/dolphinscheduler-dao/src/main/resources + + **/*.properties + **/*.xml + **/*.json + + conf + + + ${project.parent.basedir}/dolphinscheduler-api/src/main/resources + + **/*.properties + **/*.xml + **/*.json + + conf + + + target/ + + dolphinscheduler-api-${project.version}.jar + + lib + + + + + 
lib + true + + javax.servlet:servlet-api + org.eclipse.jetty.aggregate:jetty-all + org.slf4j:slf4j-log4j12 + + + + \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java new file mode 100644 index 0000000000..d999f375a0 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.web.servlet.ServletComponentScan; +import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; +import org.springframework.context.annotation.ComponentScan; +import springfox.documentation.swagger2.annotations.EnableSwagger2; + +@SpringBootApplication +@ServletComponentScan +@ComponentScan("cn.escheduler") +@EnableSwagger2 +public class ApiApplicationServer extends SpringBootServletInitializer { + + public static void main(String[] args) { + SpringApplication.run(ApiApplicationServer.class, args); + } + + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/CombinedApplicationServer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/CombinedApplicationServer.java new file mode 100644 index 0000000000..df8c855949 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/CombinedApplicationServer.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api; + +import org.apache.dolphinscheduler.alert.AlertServer; +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.server.master.MasterServer; +import org.apache.dolphinscheduler.server.rpc.LoggerServer; +import org.apache.dolphinscheduler.server.worker.WorkerServer; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.web.servlet.ServletComponentScan; +import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.ComponentScan; +import springfox.documentation.swagger2.annotations.EnableSwagger2; + +@SpringBootApplication +@ServletComponentScan +@ComponentScan("cn.escheduler") +@EnableSwagger2 +public class CombinedApplicationServer extends SpringBootServletInitializer { + + public static void main(String[] args) throws Exception { + + ConfigurableApplicationContext context = SpringApplication.run(ApiApplicationServer.class, args); + ProcessDao processDao = context.getBean(ProcessDao.class); + AlertDao alertDao = context.getBean(AlertDao.class); + + MasterServer master = new MasterServer(processDao); + master.run(processDao); + + WorkerServer workerServer = new WorkerServer(processDao, alertDao); + workerServer.run(processDao, alertDao); + + LoggerServer server = new LoggerServer(); + server.start(); + + AlertServer alertServer = AlertServer.getInstance(); + alertServer.start(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java new file mode 100644 index 0000000000..8ae2dbc529 --- /dev/null +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.configuration; + +import org.apache.dolphinscheduler.api.interceptor.LoginHandlerInterceptor; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.web.servlet.LocaleResolver; +import org.springframework.web.servlet.config.annotation.*; +import org.springframework.web.servlet.i18n.CookieLocaleResolver; +import org.springframework.web.servlet.i18n.LocaleChangeInterceptor; + +import java.util.Locale; + + +/** + * application configuration + */ +@Configuration +public class AppConfiguration implements WebMvcConfigurer { + + public static final String LOGIN_INTERCEPTOR_PATH_PATTERN = "/**/*"; + public static final String LOGIN_PATH_PATTERN = "/login"; + public static final String PATH_PATTERN = "/**"; + public static final String LOCALE_LANGUAGE_COOKIE = "language"; + public static final int COOKIE_MAX_AGE = 3600; + + + @Bean + public LoginHandlerInterceptor loginInterceptor() { + return new LoginHandlerInterceptor(); + } + 
+ + /** + * Cookie + */ + @Bean(name = "localeResolver") + public LocaleResolver localeResolver() { + CookieLocaleResolver localeResolver = new CookieLocaleResolver(); + localeResolver.setCookieName(LOCALE_LANGUAGE_COOKIE); + /** set default locale **/ + localeResolver.setDefaultLocale(Locale.US); + /** set cookie max age **/ + localeResolver.setCookieMaxAge(COOKIE_MAX_AGE); + return localeResolver; + } + + @Bean + public LocaleChangeInterceptor localeChangeInterceptor() { + LocaleChangeInterceptor lci = new LocaleChangeInterceptor(); + /** **/ + lci.setParamName("language"); + + return lci; + } + + + @Override + public void addInterceptors(InterceptorRegistry registry) { + //i18n + registry.addInterceptor(localeChangeInterceptor()); + + registry.addInterceptor(loginInterceptor()).addPathPatterns(LOGIN_INTERCEPTOR_PATH_PATTERN).excludePathPatterns(LOGIN_PATH_PATTERN,"/swagger-resources/**", "/webjars/**", "/v2/**", "/doc.html", "*.html", "/ui/**"); + } + + + @Override + public void addResourceHandlers(ResourceHandlerRegistry registry) { + registry.addResourceHandler("/static/**").addResourceLocations("classpath:/static/"); + registry.addResourceHandler("doc.html").addResourceLocations("classpath:/META-INF/resources/"); + registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/"); + registry.addResourceHandler("/ui/**").addResourceLocations("file:ui/"); + } + + @Override + public void addViewControllers(ViewControllerRegistry registry) { + registry.addViewController("/ui/").setViewName("forward:/ui/index.html"); + registry.addViewController("/").setViewName("forward:/ui/index.html"); + } + + @Override + public void addCorsMappings(CorsRegistry registry) { + registry.addMapping(PATH_PATTERN).allowedOrigins("*").allowedMethods("*"); + } + + + /** + * Turn off suffix-based content negotiation + * + * @param configurer + */ + @Override + public void configureContentNegotiation(final ContentNegotiationConfigurer configurer) 
{ + configurer.favorPathExtension(false); + } + + + + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/ServiceModelToSwagger2MapperImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/ServiceModelToSwagger2MapperImpl.java new file mode 100644 index 0000000000..9821779ee4 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/ServiceModelToSwagger2MapperImpl.java @@ -0,0 +1,509 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.configuration; + +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.Multimap; +import io.swagger.models.*; +import io.swagger.models.parameters.Parameter; +import org.apache.commons.lang3.StringUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.MessageSource; +import org.springframework.context.annotation.Primary; +import org.springframework.context.i18n.LocaleContextHolder; +import org.springframework.stereotype.Component; +import springfox.documentation.service.ApiInfo; +import springfox.documentation.service.ApiListing; +import springfox.documentation.service.Documentation; +import springfox.documentation.service.ResourceListing; +import springfox.documentation.swagger2.mappers.*; + +import java.util.*; + +import static com.google.common.collect.Maps.newTreeMap; + +/** + * application configuration + */ +@Component(value = "ServiceModelToSwagger2Mapper") +@Primary +public class ServiceModelToSwagger2MapperImpl extends ServiceModelToSwagger2Mapper { + + + @Autowired + private ModelMapper modelMapper; + @Autowired + private ParameterMapper parameterMapper; + @Autowired + private SecurityMapper securityMapper; + @Autowired + private LicenseMapper licenseMapper; + @Autowired + private VendorExtensionsMapper vendorExtensionsMapper; + + @Autowired + private MessageSource messageSource; + + @Override + public Swagger mapDocumentation(Documentation from) { + + if (from == null) { + return null; + } + + Swagger swagger = new Swagger(); + + swagger.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions())); + swagger.setSchemes(mapSchemes(from.getSchemes())); + swagger.setPaths(mapApiListings(from.getApiListings())); + swagger.setHost(from.getHost()); + swagger.setDefinitions(modelsFromApiListings( from.getApiListings() ) ); + 
swagger.setSecurityDefinitions(securityMapper.toSecuritySchemeDefinitions(from.getResourceListing())); + ApiInfo info = fromResourceListingInfo(from); + if (info != null) { + swagger.setInfo(mapApiInfo(info)); + } + swagger.setBasePath(from.getBasePath()); + swagger.setTags(tagSetToTagList(from.getTags())); + List list2 = from.getConsumes(); + if (list2 != null) { + swagger.setConsumes(new ArrayList(list2)); + } else { + swagger.setConsumes(null); + } + List list3 = from.getProduces(); + if (list3 != null) { + swagger.setProduces(new ArrayList(list3)); + } else { + swagger.setProduces(null); + } + + return swagger; + } + + + @Override + protected Info mapApiInfo(ApiInfo from) { + + if (from == null) { + return null; + } + + Info info = new Info(); + + info.setLicense(licenseMapper.apiInfoToLicense(from)); + info.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions())); + info.setTermsOfService(from.getTermsOfServiceUrl()); + info.setContact(map(from.getContact())); + info.setDescription(from.getDescription()); + info.setVersion(from.getVersion()); + info.setTitle(from.getTitle()); + + return info; + } + + @Override + protected Contact map(springfox.documentation.service.Contact from) { + + if (from == null) { + return null; + } + + Contact contact = new Contact(); + + contact.setName(from.getName()); + contact.setUrl(from.getUrl()); + contact.setEmail(from.getEmail()); + + return contact; + } + + @Override + protected io.swagger.models.Operation mapOperation(springfox.documentation.service.Operation from) { + + if (from == null) { + return null; + } + + Locale locale = LocaleContextHolder.getLocale(); + + io.swagger.models.Operation operation = new io.swagger.models.Operation(); + + operation.setSecurity(mapAuthorizations(from.getSecurityReferences())); + operation.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions())); + operation.setDescription(messageSource.getMessage(from.getNotes(), null, 
from.getNotes(), locale)); + operation.setOperationId(from.getUniqueId()); + operation.setResponses(mapResponseMessages(from.getResponseMessages())); + operation.setSchemes(stringSetToSchemeList(from.getProtocol())); + Set tagsSet = new HashSet<>(1); + + if(from.getTags() != null && from.getTags().size() > 0){ + + List list = new ArrayList(tagsSet.size()); + + Iterator it = from.getTags().iterator(); + while(it.hasNext()) + { + String tag = it.next(); + list.add(StringUtils.isNotBlank(tag) ? messageSource.getMessage(tag, null, tag, locale) : " "); + } + + operation.setTags(list); + }else { + operation.setTags(null); + } + + operation.setSummary(from.getSummary()); + Set set1 = from.getConsumes(); + if (set1 != null) { + operation.setConsumes(new ArrayList(set1)); + } else { + operation.setConsumes(null); + } + + Set set2 = from.getProduces(); + if (set2 != null) { + operation.setProduces(new ArrayList(set2)); + } else { + operation.setProduces(null); + } + + + operation.setParameters(parameterListToParameterList(from.getParameters())); + if (from.getDeprecated() != null) { + operation.setDeprecated(Boolean.parseBoolean(from.getDeprecated())); + } + + return operation; + } + + @Override + protected Tag mapTag(springfox.documentation.service.Tag from) { + + if (from == null) { + return null; + } + + Locale locale = LocaleContextHolder.getLocale(); + + Tag tag = new Tag(); + + tag.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions())); + tag.setName(messageSource.getMessage(from.getName(), null, from.getName(), locale)); + tag.setDescription(from.getDescription()); + + return tag; + } + + + private ApiInfo fromResourceListingInfo(Documentation documentation) { + + if (documentation == null) { + return null; + } + ResourceListing resourceListing = documentation.getResourceListing(); + if (resourceListing == null) { + return null; + } + ApiInfo info = resourceListing.getInfo(); + if (info == null) { + return null; + } + return info; + } + 
+ protected List tagSetToTagList(Set set) { + + if (set == null) { + return null; + } + + List list = new ArrayList(set.size()); + for (springfox.documentation.service.Tag tag : set) { + list.add(mapTag(tag)); + } + + return list; + } + + protected List stringSetToSchemeList(Set set) { + if (set == null) { + return null; + } + + List list = new ArrayList(set.size()); + for (String string : set) { + list.add(Enum.valueOf(Scheme.class, string)); + } + + return list; + } + + protected List parameterListToParameterList(List list) { + if (list == null) { + return null; + } + + List list1 = new ArrayList(list.size()); + + Locale locale = LocaleContextHolder.getLocale(); + + for (springfox.documentation.service.Parameter param : list) { + String description = messageSource.getMessage(param.getDescription(), null, param.getDescription(), locale); + + springfox.documentation.service.Parameter parameter = new springfox.documentation.service.Parameter(param.getName(),description,param.getDefaultValue(),param.isRequired(),param.isAllowMultiple(),param.isAllowEmptyValue(),param.getModelRef(),param.getType(),param.getAllowableValues(),param.getParamType(),param.getParamAccess(),param.isHidden(),param.getPattern(),param.getCollectionFormat(),param.getOrder(),param.getScalarExample(),param.getExamples() ,param.getVendorExtentions()); + list1.add(parameterMapper.mapParameter(parameter)); + } + + return list1; + } + + + Map modelsFromApiListings(Multimap apiListings) { + Map definitions = newTreeMap(); + for (ApiListing each : apiListings.values()) { + definitions.putAll(each.getModels()); + } + return modelMapper.mapModels(definitions); + } + + + + + + +// +// +// +// private static final VendorExtensionsMapper vendorMapper = new VendorExtensionsMapper(); +// +// +// +// public Parameter mapParameter(springfox.documentation.service.Parameter source) { +// Parameter bodyParameter = bodyParameter(source); +// return SerializableParameterFactories.create(source).or(bodyParameter); +// 
} +// +// private Parameter bodyParameter(springfox.documentation.service.Parameter source) { +// BodyParameter parameter = new BodyParameter() +// .description(source.getDescription()) +// .name(source.getName()) +// .schema(fromModelRef(source.getModelRef())); +// parameter.setIn(source.getParamType()); +// parameter.setAccess(source.getParamAccess()); +// parameter.setPattern(source.getPattern()); +// parameter.setRequired(source.isRequired()); +// parameter.getVendorExtensions().putAll(vendorMapper.mapExtensions(source.getVendorExtentions())); +// for (Map.Entry> each : source.getExamples().asMap().entrySet()) { +// Optional example = FluentIterable.from(each.getValue()).first(); +// if (example.isPresent() && example.get().getValue() != null) { +// parameter.addExample(each.getKey(), String.valueOf(example.get().getValue())); +// } +// } +// +// //TODO: swagger-core Body parameter does not have an enum property +// return parameter; +// } +// +// Model fromModelRef(ModelReference modelRef) { +// if (modelRef.isCollection()) { +// if (modelRef.getItemType().equals("byte")) { +// ModelImpl baseModel = new ModelImpl(); +// baseModel.setType("string"); +// baseModel.setFormat("byte"); +// return maybeAddAllowableValuesToParameter(baseModel, modelRef.getAllowableValues()); +// } else if (modelRef.getItemType().equals("file")) { +// ArrayModel files = new ArrayModel(); +// files.items(new FileProperty()); +// return files; +// } +// ModelReference itemModel = modelRef.itemModel().get(); +// return new ArrayModel() +// .items(maybeAddAllowableValues(itemTypeProperty(itemModel), itemModel.getAllowableValues())); +// } +// if (modelRef.isMap()) { +// ModelImpl baseModel = new ModelImpl(); +// ModelReference itemModel = modelRef.itemModel().get(); +// baseModel.additionalProperties( +// maybeAddAllowableValues( +// itemTypeProperty(itemModel), +// itemModel.getAllowableValues())); +// return baseModel; +// } +// if (isBaseType(modelRef.getType())) { +// Property 
property = property(modelRef.getType()); +// ModelImpl baseModel = new ModelImpl(); +// baseModel.setType(property.getType()); +// baseModel.setFormat(property.getFormat()); +// return maybeAddAllowableValuesToParameter(baseModel, modelRef.getAllowableValues()); +// +// } +// return new RefModel(modelRef.getType()); +// } +// +// +// private static class Properties { +// private static final Map> typeFactory +// = ImmutableMap.>builder() +// .put("int", newInstanceOf(IntegerProperty.class)) +// .put("long", newInstanceOf(LongProperty.class)) +// .put("float", newInstanceOf(FloatProperty.class)) +// .put("double", newInstanceOf(DoubleProperty.class)) +// .put("string", newInstanceOf(StringProperty.class)) +// .put("boolean", newInstanceOf(BooleanProperty.class)) +// .put("date", newInstanceOf(DateProperty.class)) +// .put("date-time", newInstanceOf(DateTimeProperty.class)) +// .put("bigdecimal", newInstanceOf(DecimalProperty.class)) +// .put("biginteger", newInstanceOf(BaseIntegerProperty.class)) +// .put("uuid", newInstanceOf(UUIDProperty.class)) +// .put("object", newInstanceOf(ObjectProperty.class)) +// .put("byte", bytePropertyFactory()) +// .put("__file", filePropertyFactory()) +// .build(); +// +// private Properties() { +// throw new UnsupportedOperationException(); +// } +// +// public static Property property(final String typeName) { +// String safeTypeName = nullToEmpty(typeName); +// Function> propertyLookup +// = forMap(typeFactory, voidOrRef(safeTypeName)); +// return propertyLookup.apply(safeTypeName.toLowerCase()).apply(safeTypeName); +// } +// +// public static Property property(final ModelReference modelRef) { +// if (modelRef.isMap()) { +// return new MapProperty(property(modelRef.itemModel().get())); +// } else if (modelRef.isCollection()) { +// if ("byte".equals(modelRef.itemModel().transform(toTypeName()).or(""))) { +// return new ByteArrayProperty(); +// } +// return new ArrayProperty( +// 
maybeAddAllowableValues(itemTypeProperty(modelRef.itemModel().get()), modelRef.getAllowableValues())); +// } +// return property(modelRef.getType()); +// } +// +// private static Function toTypeName() { +// return new Function() { +// @Override +// public String apply(ModelReference input) { +// return input.getType(); +// } +// }; +// } +// +// public static Property itemTypeProperty(ModelReference paramModel) { +// if (paramModel.isCollection()) { +// return new ArrayProperty( +// maybeAddAllowableValues(itemTypeProperty(paramModel.itemModel().get()), paramModel.getAllowableValues())); +// } +// return property(paramModel.getType()); +// } +// +// private static Function newInstanceOf(final Class clazz) { +// return new Function() { +// @Override +// public T apply(String input) { +// try { +// return clazz.newInstance(); +// } catch (Exception e) { +// //This is bad! should never come here +// throw new IllegalStateException(e); +// } +// } +// }; +// } +// +// static Ordering defaultOrdering(Map properties) { +// return Ordering.from(byPosition(properties)).compound(byName()); +// } +// +// private static Function voidOrRef(final String typeName) { +// return new Function() { +// @Override +// public Property apply(String input) { +// if (typeName.equalsIgnoreCase("void")) { +// return null; +// } +// return new RefProperty(typeName); +// } +// }; +// } +// +// private static Function bytePropertyFactory() { +// return new Function() { +// @Override +// public Property apply(String input) { +// final IntegerProperty integerProperty = new IntegerProperty(); +// integerProperty.setFormat("int32"); +// integerProperty.setMaximum(BigDecimal.valueOf(Byte.MAX_VALUE)); +// integerProperty.setMinimum(BigDecimal.valueOf(Byte.MIN_VALUE)); +// return integerProperty; +// } +// }; +// } +// +// private static Function filePropertyFactory() { +// return new Function() { +// @Override +// public Property apply(String input) { +// return new FileProperty(); +// } +// }; +// } 
+// +// private static Comparator byName() { +// return new Comparator() { +// @Override +// public int compare(String first, String second) { +// return first.compareTo(second); +// } +// }; +// } +// +// private static Comparator byPosition(final Map modelProperties) { +// return new Comparator() { +// @Override +// public int compare(String first, String second) { +// ModelProperty p1 = modelProperties.get(first); +// ModelProperty p2 = modelProperties.get(second); +// return Ints.compare(p1.getPosition(), p2.getPosition()); +// } +// }; +// } +// +// static Predicate> voidProperties() { +// return new Predicate>() { +// @Override +// public boolean apply(Map.Entry input) { +// return isVoid(input.getValue().getType()) +// || collectionOfVoid(input.getValue().getType()) +// || arrayTypeOfVoid(input.getValue().getType().getArrayElementType()); +// } +// }; +// } +// +// private static boolean arrayTypeOfVoid(ResolvedType arrayElementType) { +// return arrayElementType != null && isVoid(arrayElementType); +// } +// +// private static boolean collectionOfVoid(ResolvedType type) { +// return isContainerType(type) && isVoid(collectionElementType(type)); +// } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/SwaggerConfig.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/SwaggerConfig.java new file mode 100644 index 0000000000..5015438c19 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/SwaggerConfig.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.dolphinscheduler.api.configuration;
+
+import com.github.xiaoymin.swaggerbootstrapui.annotations.EnableSwaggerBootstrapUI;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
+import springfox.documentation.builders.ApiInfoBuilder;
+import springfox.documentation.builders.PathSelectors;
+import springfox.documentation.builders.RequestHandlerSelectors;
+import springfox.documentation.service.ApiInfo;
+import springfox.documentation.spi.DocumentationType;
+import springfox.documentation.spring.web.plugins.Docket;
+import springfox.documentation.swagger2.annotations.EnableSwagger2;
+
+/**
+ *
+ * swagger2 config class
+ *
+ */
+@Configuration
+@EnableSwagger2
+@EnableSwaggerBootstrapUI
+public class SwaggerConfig implements WebMvcConfigurer {
+
+    @Bean
+    public Docket createRestApi() {
+        return new Docket(DocumentationType.SWAGGER_2).apiInfo(apiInfo()).select()
+                .apis(RequestHandlerSelectors.basePackage("org.apache.dolphinscheduler.api.controller")).paths(PathSelectors.any())
+                .build();
+    }
+
+    private ApiInfo apiInfo() {
+        return new ApiInfoBuilder().title("Easy Scheduler Api Docs").description("Easy Scheduler Api Docs")
+                .build();
+    }
+
+
+}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java
new file mode 100644
index 0000000000..efc6eb6647
--- /dev/null
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java
@@ -0,0 +1,184 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.AccessTokenService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + +import static org.apache.dolphinscheduler.api.enums.Status.*; +/** + * access token controller + */ +@Api(tags = "ACCESS_TOKEN_TAG", position = 1) +@RestController +@RequestMapping("/access-token") +public class AccessTokenController extends BaseController{ + + + private static final Logger logger = LoggerFactory.getLogger(AccessTokenController.class); + + + @Autowired + private AccessTokenService accessTokenService; + + /** + * create token + * @param loginUser + * @return + */ + @ApiIgnore + @PostMapping(value = "/create") + @ResponseStatus(HttpStatus.CREATED) + public Result createToken(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "userId") int userId, + @RequestParam(value = "expireTime") String expireTime, + @RequestParam(value = "token") String token){ + logger.info("login user {}, create token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(), + userId,expireTime,token); + + try { + Map result = accessTokenService.createToken(userId, expireTime, token); + return returnDataList(result); + }catch 
(Exception e){ + logger.error(CREATE_ACCESS_TOKEN_ERROR.getMsg(),e); + return error(CREATE_ACCESS_TOKEN_ERROR.getCode(), CREATE_ACCESS_TOKEN_ERROR.getMsg()); + } + } + + /** + * create token + * @param loginUser + * @return + */ + @ApiIgnore + @PostMapping(value = "/generate") + @ResponseStatus(HttpStatus.CREATED) + public Result generateToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "userId") int userId, + @RequestParam(value = "expireTime") String expireTime){ + logger.info("login user {}, generate token , userId : {} , token expire time : {}",loginUser,userId,expireTime); + try { + Map result = accessTokenService.generateToken(userId, expireTime); + return returnDataList(result); + }catch (Exception e){ + logger.error(GENERATE_TOKEN_ERROR.getMsg(),e); + return error(GENERATE_TOKEN_ERROR.getCode(), GENERATE_TOKEN_ERROR.getMsg()); + } + } + + /** + * query access token list paging + * + * @param loginUser + * @param pageNo + * @param searchVal + * @param pageSize + * @return + */ + @ApiOperation(value = "queryAccessTokenList", notes= "QUERY_ACCESS_TOKEN_LIST_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + }) + @GetMapping(value="/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryAccessTokenList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize){ + logger.info("login user {}, list access token paging, pageNo: {}, searchVal: {}, pageSize: {}", + loginUser.getUserName(),pageNo,searchVal,pageSize); + try{ + Map result = checkPageParams(pageNo, pageSize); + 
if(result.get(Constants.STATUS) != Status.SUCCESS){ + return returnDataListPaging(result); + } + searchVal = ParameterUtils.handleEscapes(searchVal); + result = accessTokenService.queryAccessTokenList(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getMsg(),e); + return error(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getCode(),QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getMsg()); + } + } + + /** + * delete access token by id + * @param loginUser + * @param id + * @return + */ + @ApiIgnore + @PostMapping(value = "/delete") + @ResponseStatus(HttpStatus.OK) + public Result delAccessTokenById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int id) { + logger.info("login user {}, delete access token, id: {},", loginUser.getUserName(), id); + try { + Map result = accessTokenService.delAccessTokenById(loginUser, id); + return returnDataList(result); + }catch (Exception e){ + logger.error(DELETE_USER_BY_ID_ERROR.getMsg(),e); + return error(Status.DELETE_USER_BY_ID_ERROR.getCode(), Status.DELETE_USER_BY_ID_ERROR.getMsg()); + } + } + + + /** + * update token + * @param loginUser + * @return + */ + @ApiIgnore + @PostMapping(value = "/update") + @ResponseStatus(HttpStatus.CREATED) + public Result updateToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int id, + @RequestParam(value = "userId") int userId, + @RequestParam(value = "expireTime") String expireTime, + @RequestParam(value = "token") String token){ + logger.info("login user {}, update token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(), + userId,expireTime,token); + + try { + Map result = accessTokenService.updateToken(id,userId, expireTime, token); + return returnDataList(result); + }catch (Exception e){ + logger.error(CREATE_ACCESS_TOKEN_ERROR.getMsg(),e); + return 
error(CREATE_ACCESS_TOKEN_ERROR.getCode(), CREATE_ACCESS_TOKEN_ERROR.getMsg()); + } + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java new file mode 100644 index 0000000000..d12415cdd6 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java @@ -0,0 +1,252 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.service.AlertGroupService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.apache.dolphinscheduler.api.enums.Status; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.HashMap; +import java.util.Map; + +/** + * alert group controller + */ +@Api(tags = "ALERT_GROUP_TAG", position = 1) +@RestController +@RequestMapping("alert-group") +public class AlertGroupController extends BaseController{ + + private static final Logger logger = LoggerFactory.getLogger(AlertGroupController.class); + + @Autowired + private AlertGroupService alertGroupService; + + + /** + * create alert group + * @param loginUser + * @param groupName + * @param groupType + * @param desc + * @return + */ + @ApiOperation(value = "createAlertgroup", notes= "CREATE_ALERT_GROUP_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType ="AlertType"), + @ApiImplicitParam(name = "desc", value = "DESC", dataType ="String") + }) + @PostMapping(value = "/create") + @ResponseStatus(HttpStatus.CREATED) + public Result createAlertgroup(@ApiIgnore @RequestAttribute(value = 
Constants.SESSION_USER) User loginUser, + @RequestParam(value = "groupName") String groupName, + @RequestParam(value = "groupType") AlertType groupType, + @RequestParam(value = "desc",required = false) String desc) { + logger.info("loginUser user {}, create alertgroup, groupName: {}, groupType: {}, desc: {}", + loginUser.getUserName(), groupName, groupType,desc); + try { + Map result = alertGroupService.createAlertgroup(loginUser, groupName, groupType,desc); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.CREATE_ALERT_GROUP_ERROR.getMsg(),e); + return error(Status.CREATE_ALERT_GROUP_ERROR.getCode(), Status.CREATE_ALERT_GROUP_ERROR.getMsg()); + } + } + + /** + * alert group list + * @param loginUser + * @return + */ + @ApiOperation(value = "list", notes= "QUERY_ALERT_GROUP_LIST_NOTES") + @GetMapping(value = "/list") + @ResponseStatus(HttpStatus.OK) + public Result list(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { + logger.info("login user {}, query all alertGroup", + loginUser.getUserName()); + try{ + HashMap result = alertGroupService.queryAlertgroup(); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.QUERY_ALL_ALERTGROUP_ERROR.getMsg(),e); + return error(Status.QUERY_ALL_ALERTGROUP_ERROR.getCode(), Status.QUERY_ALL_ALERTGROUP_ERROR.getMsg()); + } + } + + /** + * paging query alarm group list + * + * @param loginUser + * @param pageNo + * @param searchVal + * @param pageSize + * @return + */ + @ApiOperation(value = "queryTaskListPaging", notes= "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") + }) + @GetMapping(value="/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result 
listPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize){ + logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}", + loginUser.getUserName(),pageNo,searchVal,pageSize); + try{ + Map result = checkPageParams(pageNo, pageSize); + if(result.get(Constants.STATUS) != Status.SUCCESS){ + return returnDataListPaging(result); + } + + searchVal = ParameterUtils.handleEscapes(searchVal); + result = alertGroupService.listPaging(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(Status.LIST_PAGING_ALERT_GROUP_ERROR.getMsg(),e); + return error(Status.LIST_PAGING_ALERT_GROUP_ERROR.getCode(), Status.LIST_PAGING_ALERT_GROUP_ERROR.getMsg()); + } + } + + /** + * updateProcessInstance alert group + * @param loginUser + * @param id + * @param groupName + * @param groupType + * @param desc + * @return + */ + @ApiOperation(value = "updateAlertgroup", notes= "UPDATE_ALERT_GROUP_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int",example = "100"), + @ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType ="AlertType"), + @ApiImplicitParam(name = "desc", value = "DESC", dataType ="String") + }) + @PostMapping(value = "/update") + @ResponseStatus(HttpStatus.OK) + public Result updateAlertgroup(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int id, + @RequestParam(value = "groupName") String groupName, + @RequestParam(value = "groupType") AlertType groupType, + @RequestParam(value = "desc",required = false) String desc) { + logger.info("login user {}, 
updateProcessInstance alertgroup, groupName: {}, groupType: {}, desc: {}", + loginUser.getUserName(), groupName, groupType,desc); + try { + Map result = alertGroupService.updateAlertgroup(loginUser, id, groupName, groupType, desc); + return returnDataList(result); + + }catch (Exception e){ + logger.error(Status.UPDATE_ALERT_GROUP_ERROR.getMsg(),e); + return error(Status.UPDATE_ALERT_GROUP_ERROR.getCode(), Status.UPDATE_ALERT_GROUP_ERROR.getMsg()); + } + } + + /** + * delete alert group by id + * @param loginUser + * @param id + * @return + */ + @ApiOperation(value = "delAlertgroupById", notes= "DELETE_ALERT_GROUP_BY_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int",example = "100") + }) + @PostMapping(value = "/delete") + @ResponseStatus(HttpStatus.OK) + public Result delAlertgroupById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int id) { + logger.info("login user {}, delete AlertGroup, id: {},", loginUser.getUserName(), id); + try { + Map result = alertGroupService.delAlertgroupById(loginUser, id); + return returnDataList(result); + + }catch (Exception e){ + logger.error(Status.DELETE_ALERT_GROUP_ERROR.getMsg(),e); + return error(Status.DELETE_ALERT_GROUP_ERROR.getCode(), Status.DELETE_ALERT_GROUP_ERROR.getMsg()); + } + } + + + /** + * check alert group exist + * @param loginUser + * @param groupName + * @return + */ + @ApiOperation(value = "verifyGroupName", notes= "VERIFY_ALERT_GROUP_NAME_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"), + }) + @GetMapping(value = "/verify-group-name") + @ResponseStatus(HttpStatus.OK) + public Result verifyGroupName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value ="groupName") String groupName + ) { + logger.info("login user {}, verfiy group name: {}", + 
loginUser.getUserName(),groupName); + + return alertGroupService.verifyGroupName(loginUser, groupName); + } + + /** + * grant user + * + * @param loginUser + * @param userIds + * @return + */ + @ApiOperation(value = "grantUser", notes= "GRANT_ALERT_GROUP_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int",example = "100"), + @ApiImplicitParam(name = "userIds", value = "USER_IDS", required = true, dataType = "String") + }) + @PostMapping(value = "/grant-user") + @ResponseStatus(HttpStatus.OK) + public Result grantUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "alertgroupId") int alertgroupId, + @RequestParam(value = "userIds") String userIds) { + logger.info("login user {}, grant user, alertGroupId: {},userIds : {}", loginUser.getUserName(), alertgroupId,userIds); + try { + Map result = alertGroupService.grantUser(loginUser, alertgroupId, userIds); + return returnDataList(result); + + }catch (Exception e){ + logger.error(Status.ALERT_GROUP_GRANT_USER_ERROR.getMsg(),e); + return error(Status.ALERT_GROUP_GRANT_USER_ERROR.getCode(), Status.ALERT_GROUP_GRANT_USER_ERROR.getMsg()); + } + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/BaseController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/BaseController.java new file mode 100644 index 0000000000..75bea3e4b6 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/BaseController.java @@ -0,0 +1,272 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.apache.commons.lang3.StringUtils; + +import javax.servlet.http.HttpServletRequest; +import java.text.MessageFormat; +import java.util.HashMap; +import java.util.Map; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * base controller + */ +public class BaseController { + + /** + * check params + * + * @param pageNo + * @param pageSize + * @return + */ + public Map checkPageParams(int pageNo, int pageSize) { + Map result = new HashMap<>(2); + Status resultEnum = Status.SUCCESS; + String msg = Status.SUCCESS.getMsg(); + if (pageNo <= 0) { + resultEnum = Status.REQUEST_PARAMS_NOT_VALID_ERROR; + msg = MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), Constants.PAGE_NUMBER); + } else if (pageSize <= 0) { + resultEnum = Status.REQUEST_PARAMS_NOT_VALID_ERROR; + msg = MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), Constants.PAGE_SIZE); + } + result.put(Constants.STATUS, resultEnum); + result.put(Constants.MSG, msg); + return result; + } + + /** + * get ip address in the http request + * + * 
@param request + * @return client ip address + */ + public static String getClientIpAddress(HttpServletRequest request) { + String clientIp = request.getHeader(HTTP_X_FORWARDED_FOR); + + if (StringUtils.isNotEmpty(clientIp) && !StringUtils.equalsIgnoreCase(HTTP_HEADER_UNKNOWN, clientIp)) { + int index = clientIp.indexOf(COMMA); + if (index != -1) { + return clientIp.substring(0, index); + } else { + return clientIp; + } + } + + clientIp = request.getHeader(HTTP_X_REAL_IP); + if (StringUtils.isNotEmpty(clientIp) && !StringUtils.equalsIgnoreCase(HTTP_HEADER_UNKNOWN, clientIp)) { + return clientIp; + } + + return request.getRemoteAddr(); + } + + /** + * return data list + * + * @param result + * @return + */ + public Result returnDataList(Map result) { + Status status = (Status) result.get(Constants.STATUS); + if (status == Status.SUCCESS) { + String msg = Status.SUCCESS.getMsg(); + Object datalist = result.get(Constants.DATA_LIST); + return success(msg, datalist); + } else { + Integer code = status.getCode(); + String msg = (String) result.get(Constants.MSG); + return error(code, msg); + } + } + + /** + * return data list with paging + * @param result + * @return + */ + public Result returnDataListPaging(Map result) { + Status status = (Status) result.get(Constants.STATUS); + if (status == Status.SUCCESS) { + result.put(Constants.MSG, Status.SUCCESS.getMsg()); + PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); + return success(pageInfo.getLists(), pageInfo.getCurrentPage(), pageInfo.getTotalCount(), + pageInfo.getTotalPage()); + } else { + Integer code = status.getCode(); + String msg = (String) result.get(Constants.MSG); + return error(code, msg); + } + } + + /** + * success + * + * @return + */ + public Result success() { + Result result = new Result(); + result.setCode(Status.SUCCESS.getCode()); + result.setMsg(Status.SUCCESS.getMsg()); + + return result; + } + + /** + * success does not need to return data + * + * @param msg + * @return + */ + 
public Result success(String msg) { + Result result = new Result(); + result.setCode(Status.SUCCESS.getCode()); + result.setMsg(msg); + + return result; + } + + /** + * return data no paging + * + * @param msg + * @param list + * @return + */ + public Result success(String msg, Object list) { + Result result = getResult(msg, list); + return result; + } + + /** + * return data no paging + * + * @param list + * @return + */ + public Result success(Object list) { + Result result = getResult(Status.SUCCESS.getMsg(), list); + return result; + } + + /** + * return the data use Map format, for example, passing the value of key, value, passing a value + * eg. "/user/add" then return user name: zhangsan + * + * @param msg + * @param object + * @return + */ + public Result success(String msg, Map object) { + Result result = getResult(msg, object); + return result; + } + + /** + * return data with paging + * + * @param totalList + * @param currentPage + * @param total + * @return + */ + public Result success(Object totalList, Integer currentPage, + Integer total, Integer totalPage) { + Result result = new Result(); + result.setCode(Status.SUCCESS.getCode()); + result.setMsg(Status.SUCCESS.getMsg()); + + Map map = new HashMap<>(4); + map.put(Constants.TOTAL_LIST, totalList); + map.put(Constants.CURRENT_PAGE, currentPage); + map.put(Constants.TOTAL_PAGE, totalPage); + map.put(Constants.TOTAL, total); + result.setData(map); + return result; + } + + /** + * error handle + * + * @param code + * @param msg + * @return + */ + public Result error(Integer code, String msg) { + Result result = new Result(); + result.setCode(code); + result.setMsg(msg); + return result; + } + + /** + * put message to map + * + * @param result + * @param status + * @param statusParams + */ + protected void putMsg(Map result, Status status, Object... 
statusParams) { + result.put(Constants.STATUS, status); + if (statusParams != null && statusParams.length > 0) { + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.put(Constants.MSG, status.getMsg()); + } + } + + /** + * put message to result object + * + * @param result + * @param status + */ + protected void putMsg(Result result, Status status, Object... statusParams) { + result.setCode(status.getCode()); + + if (statusParams != null && statusParams.length > 0) { + result.setMsg(MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.setMsg(status.getMsg()); + } + + } + + /** + * get result + * @param msg + * @param list + * @return + */ + private Result getResult(String msg, Object list) { + Result result = new Result(); + result.setCode(Status.SUCCESS.getCode()); + result.setMsg(msg); + + result.setData(list); + return result; + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java new file mode 100644 index 0000000000..76b4f244b7 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java @@ -0,0 +1,199 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.service.DataAnalysisService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.apache.dolphinscheduler.api.enums.Status; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + +/** + * data analysis controller + */ +@Api(tags = "DATA_ANALYSIS_TAG", position = 1) +@RestController +@RequestMapping("projects/analysis") +public class DataAnalysisController extends BaseController{ + + private static final Logger logger = LoggerFactory.getLogger(DataAnalysisController.class); + + + @Autowired + DataAnalysisService dataAnalysisService; + + + /** + * statistical task instance status data + * + * @param loginUser + * @param projectId + * @return + */ + @ApiOperation(value = "countTaskState", notes= "COUNT_TASK_STATE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"), + @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType ="String"), + 
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + }) + @GetMapping(value="/task-state-count") + @ResponseStatus(HttpStatus.OK) + public Result countTaskState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value="startDate", required=false) String startDate, + @RequestParam(value="endDate", required=false) String endDate, + @RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){ + try{ + logger.info("count task state, user:{}, start date: {}, end date:{}, project id {}", + loginUser.getUserName(), startDate, endDate, projectId); + Map result = dataAnalysisService.countTaskStateByProject(loginUser,projectId, startDate, endDate); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.TASK_INSTANCE_STATE_COUNT_ERROR.getMsg(),e); + return error(Status.TASK_INSTANCE_STATE_COUNT_ERROR.getCode(), Status.TASK_INSTANCE_STATE_COUNT_ERROR.getMsg()); + } + } + + /** + * statistical process instance status data + * + * @param loginUser + * @param projectId + * @return + */ + @ApiOperation(value = "countProcessInstanceState", notes= "COUNT_PROCESS_INSTANCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"), + @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType ="String"), + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + }) + @GetMapping(value="/process-state-count") + @ResponseStatus(HttpStatus.OK) + public Result countProcessInstanceState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value="startDate", required=false) String startDate, + @RequestParam(value="endDate", required=false) String endDate, + @RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){ + try{ + logger.info("count process instance state, user:{}, start date: {}, end 
date:{}, project id", + loginUser.getUserName(), startDate, endDate, projectId); + Map result = dataAnalysisService.countProcessInstanceStateByProject(loginUser, projectId, startDate, endDate); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg(),e); + return error(Status.COUNT_PROCESS_INSTANCE_STATE_ERROR.getCode(), Status.COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg()); + } + } + + /** + * statistics the process definition quantities of certain person + * + * @param loginUser + * @param projectId + * @return + */ + @ApiOperation(value = "countDefinitionByUser", notes= "COUNT_PROCESS_DEFINITION_BY_USER_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + }) + @GetMapping(value="/define-user-count") + @ResponseStatus(HttpStatus.OK) + public Result countDefinitionByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){ + try{ + logger.info("count process definition , user:{}, project id", + loginUser.getUserName(), projectId); + Map result = dataAnalysisService.countDefinitionByUser(loginUser, projectId); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.COUNT_PROCESS_DEFINITION_USER_ERROR.getMsg(),e); + return error(Status.COUNT_PROCESS_DEFINITION_USER_ERROR.getCode(), Status.COUNT_PROCESS_DEFINITION_USER_ERROR.getMsg()); + } + } + + + /** + * statistical command status data + * + * @param loginUser + * @param projectId + * @return + */ + @ApiOperation(value = "countCommandState", notes= "COUNT_COMMAND_STATE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"), + @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType ="String"), + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example 
= "100") + }) + @GetMapping(value="/command-state-count") + @ResponseStatus(HttpStatus.OK) + public Result countCommandState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value="startDate", required=false) String startDate, + @RequestParam(value="endDate", required=false) String endDate, + @RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){ + try{ + logger.info("count command state, user:{}, start date: {}, end date:{}, project id {}", + loginUser.getUserName(), startDate, endDate, projectId); + Map result = dataAnalysisService.countCommandState(loginUser, projectId, startDate, endDate); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.COMMAND_STATE_COUNT_ERROR.getMsg(),e); + return error(Status.COMMAND_STATE_COUNT_ERROR.getCode(), Status.COMMAND_STATE_COUNT_ERROR.getMsg()); + } + } + + /** + * queue count + * + * @param loginUser + * @param projectId + * @return + */ + @ApiOperation(value = "countQueueState", notes= "COUNT_QUEUE_STATE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"), + @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType ="String"), + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + }) + @GetMapping(value="/queue-count") + @ResponseStatus(HttpStatus.OK) + public Result countQueueState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){ + try{ + logger.info("count command state, user:{}, start date: {}, end date:{}, project id {}", + loginUser.getUserName(), projectId); + Map result = dataAnalysisService.countQueueState(loginUser, projectId); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.QUEUE_COUNT_ERROR.getMsg(),e); + return error(Status.QUEUE_COUNT_ERROR.getCode(), 
Status.QUEUE_COUNT_ERROR.getMsg()); + } + } + + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java new file mode 100644 index 0000000000..1f9a30098f --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java @@ -0,0 +1,458 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.DataSourceService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.utils.CommonUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + +import static org.apache.dolphinscheduler.api.enums.Status.*; +/** + * data source controller + */ +@Api(tags = "DATA_SOURCE_TAG", position = 3) +@RestController +@RequestMapping("datasources") +public class DataSourceController extends BaseController { + + private static final Logger logger = LoggerFactory.getLogger(DataSourceController.class); + + @Autowired + private DataSourceService dataSourceService; + + /** + * create data source + * @param loginUser + * @param name + * @param note + * @param type + * @param host + * @param port + * @param database + * @param principal + * @param userName + * @param password + * @param other + * @return + */ + @ApiOperation(value = "createDataSource", notes= "CREATE_DATA_SOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"), + @ApiImplicitParam(name = "type", value = 
"DB_TYPE", required = true,dataType ="DbType"), + @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"), + @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"), + @ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"), + @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String") + }) + @PostMapping(value = "/create") + @ResponseStatus(HttpStatus.CREATED) + public Result createDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("name") String name, + @RequestParam(value = "note", required = false) String note, + @RequestParam(value = "type") DbType type, + @RequestParam(value = "host") String host, + @RequestParam(value = "port") String port, + @RequestParam(value = "database") String database, + @RequestParam(value = "principal") String principal, + @RequestParam(value = "userName") String userName, + @RequestParam(value = "password") String password, + @RequestParam(value = "other") String other) { + logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {},port: {},database : {},principal: {},userName : {} other: {}", + loginUser.getUserName(), name, note, type, host,port,database,principal,userName,other); + try { + String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal,userName, password, other); + Map result = dataSourceService.createDataSource(loginUser, name, note, type, parameter); + return returnDataList(result); + + } catch (Exception e) { + logger.error(CREATE_DATASOURCE_ERROR.getMsg(),e); + return error(Status.CREATE_DATASOURCE_ERROR.getCode(), Status.CREATE_DATASOURCE_ERROR.getMsg()); + } + } + + + /** + * 
update data source + * + * @param loginUser + * @param name + * @param note + * @param type + * @param other + * @return + */ + @ApiOperation(value = "updateDataSource", notes= "UPDATE_DATA_SOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"), + @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType"), + @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"), + @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"), + @ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"), + @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String") + }) + @PostMapping(value = "/update") + @ResponseStatus(HttpStatus.OK) + public Result updateDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("id") int id, + @RequestParam("name") String name, + @RequestParam(value = "note", required = false) String note, + @RequestParam(value = "type") DbType type, + @RequestParam(value = "host") String host, + @RequestParam(value = "port") String port, + @RequestParam(value = "database") String database, + @RequestParam(value = "principal") String principal, + @RequestParam(value = "userName") String userName, + @RequestParam(value = "password") String password, + @RequestParam(value = "other") String other) { + logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {},
other: {}", + loginUser.getUserName(), name, note, type, other); + try { + String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal, userName, password, other); + Map dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter); + return returnDataList(dataSource); + } catch (Exception e) { + logger.error(UPDATE_DATASOURCE_ERROR.getMsg(),e); + return error(UPDATE_DATASOURCE_ERROR.getCode(), UPDATE_DATASOURCE_ERROR.getMsg()); + } + + + } + + /** + * query data source + * + * @param loginUser + * @param id + * @return + */ + @ApiOperation(value = "queryDataSource", notes= "QUERY_DATA_SOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100") + + }) + @PostMapping(value = "/update-ui") + @ResponseStatus(HttpStatus.OK) + public Result queryDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("id") int id) { + logger.info("login user {}, query datasource: {}", + loginUser.getUserName(), id); + try { + Map result = dataSourceService.queryDataSource(id); + return returnDataList(result); + } catch (Exception e) { + logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e); + return error(Status.QUERY_DATASOURCE_ERROR.getCode(), Status.QUERY_DATASOURCE_ERROR.getMsg()); + } + + + } + + /** + * query datasource by type + * + * @param loginUser + * @return + */ + @ApiOperation(value = "queryDataSourceList", notes= "QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType") + }) + @GetMapping(value = "/list") + @ResponseStatus(HttpStatus.OK) + public Result queryDataSourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("type") DbType type) { + try { + Map result = dataSourceService.queryDataSourceList(loginUser, type.ordinal()); + return
returnDataList(result); + } catch (Exception e) { + logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e); + return error(Status.QUERY_DATASOURCE_ERROR.getCode(), Status.QUERY_DATASOURCE_ERROR.getMsg()); + } + } + + /** + * query datasource with paging + * + * @param loginUser + * @param searchVal + * @param pageNo + * @param pageSize + * @return + */ + @ApiOperation(value = "queryDataSourceListPaging", notes= "QUERY_DATA_SOURCE_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + }) + @GetMapping(value = "/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryDataSourceListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + try { + Map result = checkPageParams(pageNo, pageSize); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return returnDataListPaging(result); + } + searchVal = ParameterUtils.handleEscapes(searchVal); + result = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); + } catch (Exception e) { + logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e); + return error(QUERY_DATASOURCE_ERROR.getCode(), QUERY_DATASOURCE_ERROR.getMsg()); + } + } + + /** + * connect datasource + * + * @param loginUser + * @param name + * @param note + * @param type + * @param other + * @return + */ + @ApiOperation(value = "connectDataSource", notes= "CONNECT_DATA_SOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "note", value =
"DATA_SOURCE_NOTE", dataType = "String"), + @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType"), + @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"), + @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"), + @ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"), + @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String") + }) + @PostMapping(value = "/connect") + @ResponseStatus(HttpStatus.OK) + public Result connectDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("name") String name, + @RequestParam(value = "note", required = false) String note, + @RequestParam(value = "type") DbType type, + @RequestParam(value = "host") String host, + @RequestParam(value = "port") String port, + @RequestParam(value = "database") String database, + @RequestParam(value = "principal") String principal, + @RequestParam(value = "userName") String userName, + @RequestParam(value = "password") String password, + @RequestParam(value = "other") String other) { + logger.info("login user {}, connect datasource: {} failure, note: {}, type: {}, other: {}", + loginUser.getUserName(), name, note, type, other); + try { + String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal,userName, password, other); + Boolean isConnection = dataSourceService.checkConnection(type, parameter); + Result result = new Result(); + + if (isConnection) { + putMsg(result, SUCCESS); + } else { + putMsg(result, CONNECT_DATASOURCE_FAILURE); + } + return result; + } catch (Exception e) { + logger.error(CONNECT_DATASOURCE_FAILURE.getMsg(),e); + return 
error(CONNECT_DATASOURCE_FAILURE.getCode(), CONNECT_DATASOURCE_FAILURE.getMsg()); + } + } + + /** + * connection test + * + * @param loginUser + * @return + */ + @ApiOperation(value = "connectionTest", notes= "CONNECT_DATA_SOURCE_TEST_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/connect-by-id") + @ResponseStatus(HttpStatus.OK) + public Result connectionTest(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("id") int id) { + logger.info("connection test, login user:{}, id:{}", loginUser.getUserName(), id); + + try { + Boolean isConnection = dataSourceService.connectionTest(loginUser, id); + Result result = new Result(); + + if (isConnection) { + putMsg(result, SUCCESS); + } else { + putMsg(result, CONNECTION_TEST_FAILURE); + } + return result; + } catch (Exception e) { + logger.error(CONNECTION_TEST_FAILURE.getMsg(),e); + return error(CONNECTION_TEST_FAILURE.getCode(), CONNECTION_TEST_FAILURE.getMsg()); + } + + } + + /** + * delete datasource by id + * + * @param loginUser + * @param id datasource id + * @return + */ + @ApiOperation(value = "delete", notes= "DELETE_DATA_SOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/delete") + @ResponseStatus(HttpStatus.OK) + public Result delete(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("id") int id) { + try { + logger.info("delete datasource,login user:{}, id:{}", loginUser.getUserName(), id); + return dataSourceService.delete(loginUser, id); + } catch (Exception e) { + logger.error(DELETE_DATA_SOURCE_FAILURE.getMsg(),e); + return error(DELETE_DATA_SOURCE_FAILURE.getCode(), DELETE_DATA_SOURCE_FAILURE.getMsg()); + } + } + + /** + * verify datasource name + * + * @param loginUser + * @param 
name + * @return + */ + @ApiOperation(value = "verifyDataSourceName", notes= "VERIFY_DATA_SOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String") + }) + @GetMapping(value = "/verify-name") + @ResponseStatus(HttpStatus.OK) + public Result verifyDataSourceName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "name") String name + ) { + logger.info("login user {}, verfiy datasource name: {}", + loginUser.getUserName(), name); + + try { + return dataSourceService.verifyDataSourceName(loginUser, name); + } catch (Exception e) { + logger.error(VERFIY_DATASOURCE_NAME_FAILURE.getMsg(),e); + return error(VERFIY_DATASOURCE_NAME_FAILURE.getCode(), VERFIY_DATASOURCE_NAME_FAILURE.getMsg()); + } + } + + + + /** + * unauthorized datasource + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "unauthDatasource", notes= "UNAUTHORIZED_DATA_SOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/unauth-datasource") + @ResponseStatus(HttpStatus.OK) + public Result unauthDatasource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("userId") Integer userId) { + try { + logger.info("unauthorized datasource, login user:{}, unauthorized userId:{}", + loginUser.getUserName(), userId); + Map result = dataSourceService.unauthDatasource(loginUser, userId); + return returnDataList(result); + } catch (Exception e) { + logger.error(UNAUTHORIZED_DATASOURCE.getMsg(),e); + return error(UNAUTHORIZED_DATASOURCE.getCode(), UNAUTHORIZED_DATASOURCE.getMsg()); + } + } + + + /** + * authorized datasource + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "authedDatasource", notes= "AUTHORIZED_DATA_SOURCE_NOTES") + @ApiImplicitParams({ + 
@ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/authed-datasource") + @ResponseStatus(HttpStatus.OK) + public Result authedDatasource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("userId") Integer userId) { + try { + logger.info("authorized data source, login user:{}, authorized useId:{}", + loginUser.getUserName(), userId); + Map result = dataSourceService.authedDatasource(loginUser, userId); + return returnDataList(result); + } catch (Exception e) { + logger.error(AUTHORIZED_DATA_SOURCE.getMsg(),e); + return error(AUTHORIZED_DATA_SOURCE.getCode(), AUTHORIZED_DATA_SOURCE.getMsg()); + } + } + + /** + * get user info + * + * @param loginUser + * @return + */ + @ApiOperation(value = "getKerberosStartupState", notes= "GET_USER_INFO_NOTES") + @GetMapping(value="/kerberos-startup-state") + @ResponseStatus(HttpStatus.OK) + public Result getKerberosStartupState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + logger.info("login user {},get kerberos startup state : {}", loginUser.getUserName()); + try{ + // if upload resource is HDFS and kerberos startup is true , else false + return success(Status.SUCCESS.getMsg(), CommonUtils.getKerberosStartupState()); + }catch (Exception e){ + logger.error(KERBEROS_STARTUP_STATE.getMsg(),e); + return error(Status.KERBEROS_STARTUP_STATE.getCode(), Status.KERBEROS_STARTUP_STATE.getMsg()); + } + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java new file mode 100644 index 0000000000..68b6985ffe --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java @@ -0,0 +1,199 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + 
* contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.enums.ExecuteType; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ExecutorService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.*; +import org.apache.dolphinscheduler.common.enums.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + + +/** + * execute process controller + */ +@Api(tags = "PROCESS_INSTANCE_EXECUTOR_TAG", position = 1) +@RestController +@RequestMapping("projects/{projectName}/executors") +public class ExecutorController extends BaseController { + + private static final Logger logger = LoggerFactory.getLogger(ExecutorController.class); + + @Autowired + private ExecutorService execService; + + /** + * execute process instance + */ + @ApiOperation(value = "startProcessInstance", notes= 
"RUN_PROCESS_INSTANCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", required = true, dataType = "String"), + @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", required = true, dataType ="FailureStrategy"), + @ApiImplicitParam(name = "startNodeList", value = "START_NODE_LIST", dataType ="String"), + @ApiImplicitParam(name = "taskDependType", value = "TASK_DEPEND_TYPE", dataType ="TaskDependType"), + @ApiImplicitParam(name = "execType", value = "COMMAND_TYPE", dataType ="CommandType"), + @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE",required = true, dataType ="WarningType"), + @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID",required = true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "receivers", value = "RECEIVERS",dataType ="String" ), + @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC",dataType ="String" ), + @ApiImplicitParam(name = "runMode", value = "RUN_MODE",dataType ="RunMode" ), + @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority" ), + @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int",example = "100"), + @ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int",example = "100"), + }) + @PostMapping(value = "start-process-instance") + @ResponseStatus(HttpStatus.OK) + public Result startProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "processDefinitionId") int processDefinitionId, + @RequestParam(value = "scheduleTime", required = false) String scheduleTime, + @RequestParam(value = 
"failureStrategy", required = true) FailureStrategy failureStrategy, + @RequestParam(value = "startNodeList", required = false) String startNodeList, + @RequestParam(value = "taskDependType", required = false) TaskDependType taskDependType, + @RequestParam(value = "execType", required = false) CommandType execType, + @RequestParam(value = "warningType", required = true) WarningType warningType, + @RequestParam(value = "warningGroupId", required = false) int warningGroupId, + @RequestParam(value = "receivers", required = false) String receivers, + @RequestParam(value = "receiversCc", required = false) String receiversCc, + @RequestParam(value = "runMode", required = false) RunMode runMode, + @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority, + @RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId, + @RequestParam(value = "timeout", required = false) Integer timeout) { + try { + logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, " + + "failure policy: {}, node name: {}, node dep: {}, notify type: {}, " + + "notify group id: {},receivers:{},receiversCc:{}, run mode: {},process instance priority:{}, workerGroupId: {}, timeout: {}", + loginUser.getUserName(), projectName, processDefinitionId, scheduleTime, + failureStrategy, startNodeList, taskDependType, warningType, warningGroupId,receivers,receiversCc,runMode,processInstancePriority, + workerGroupId, timeout); + + if (timeout == null) { + timeout = org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT; + } + + Map result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy, + startNodeList, taskDependType, warningType, + warningGroupId,receivers,receiversCc, runMode,processInstancePriority, workerGroupId, timeout); + return returnDataList(result); + } catch (Exception e) { + 
logger.error(Status.START_PROCESS_INSTANCE_ERROR.getMsg(),e); + return error(Status.START_PROCESS_INSTANCE_ERROR.getCode(), Status.START_PROCESS_INSTANCE_ERROR.getMsg()); + } + } + + + /** + * do action to process instance:pause, stop, repeat, recover from pause, recover from stop + * + * @param loginUser + * @param projectName + * @param processInstanceId + * @return + */ + @ApiOperation(value = "execute", notes= "EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "executeType", value = "EXECUTE_TYPE", required = true, dataType = "ExecuteType") + }) + @PostMapping(value = "/execute") + @ResponseStatus(HttpStatus.OK) + public Result execute(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processInstanceId") Integer processInstanceId, + @RequestParam("executeType") ExecuteType executeType + ) { + try { + logger.info("execute command, login user: {}, project:{}, process instance id:{}, execute type:{}", + loginUser.getUserName(), projectName, processInstanceId, executeType.toString()); + Map result = execService.execute(loginUser, projectName, processInstanceId, executeType); + return returnDataList(result); + } catch (Exception e) { + logger.error(Status.EXECUTE_PROCESS_INSTANCE_ERROR.getMsg(),e); + return error(Status.EXECUTE_PROCESS_INSTANCE_ERROR.getCode(), Status.EXECUTE_PROCESS_INSTANCE_ERROR.getMsg()); + } + } + + /** + * check process definition and all of the son process definitions is on line. 
+ * + * @param loginUser + * @param processDefinitionId + * @return + */ + @ApiOperation(value = "startCheckProcessDefinition", notes= "START_CHECK_PROCESS_DEFINITION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") + }) + @PostMapping(value = "/start-check") + @ResponseStatus(HttpStatus.OK) + public Result startCheckProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "processDefinitionId") int processDefinitionId) { + logger.info("login user {}, check process definition", loginUser.getUserName(), processDefinitionId); + try { + Map result = execService.startCheckByProcessDefinedId(processDefinitionId); + return returnDataList(result); + + } catch (Exception e) { + logger.error(Status.CHECK_PROCESS_DEFINITION_ERROR.getMsg(),e); + return error(Status.CHECK_PROCESS_DEFINITION_ERROR.getCode(), Status.CHECK_PROCESS_DEFINITION_ERROR.getMsg()); + } + } + + /** + * query recipients and copyers by process definition ID + * + * @param loginUser + * @param processDefinitionId + * @return + */ + @ApiIgnore + @ApiOperation(value = "getReceiverCc", notes= "GET_RECEIVER_CC_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100") + + }) + @GetMapping(value = "/get-receiver-cc") + @ResponseStatus(HttpStatus.OK) + public Result getReceiverCc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "processDefinitionId",required = false) Integer processDefinitionId, + @RequestParam(value = "processInstanceId",required = false) Integer processInstanceId) { + logger.info("login user {}, get process definition receiver and 
cc", loginUser.getUserName()); + try { + Map result = execService.getReceiverCc(processDefinitionId,processInstanceId); + return returnDataList(result); + } catch (Exception e) { + logger.error(Status.QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getMsg(),e); + return error(Status.QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getCode(), Status.QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getMsg()); + } + } + + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java new file mode 100644 index 0000000000..e04101ad7f --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.service.LoggerService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.apache.dolphinscheduler.api.enums.Status; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + + +/** + * log controller + */ +@Api(tags = "LOGGER_TAG", position = 13) +@RestController +@RequestMapping("/log") +public class LoggerController extends BaseController { + + private static final Logger logger = LoggerFactory.getLogger(LoggerController.class); + + + @Autowired + private LoggerService loggerService; + + /** + * query task log + */ + @ApiOperation(value = "queryLog", notes= "QUERY_TASK_INSTANCE_LOG_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "taskInstId", value = "TASK_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", dataType ="Int", example = "100"), + @ApiImplicitParam(name = "limit", value = "LIMIT", dataType ="Int", example = "100") + }) + @GetMapping(value = "/detail") + @ResponseStatus(HttpStatus.OK) + public Result queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "taskInstId") int taskInstanceId, + @RequestParam(value = "skipLineNum") int skipNum, + @RequestParam(value = "limit") int limit) { + try { + + logger.info( + "login user {}, 
view {} task instance log ,skipLineNum {} , limit {}", loginUser.getUserName(), taskInstanceId, skipNum, limit); + return loggerService.queryLog(taskInstanceId, skipNum, limit); + } catch (Exception e) { + logger.error(Status.QUERY_TASK_INSTANCE_LOG_ERROR.getMsg(), e); + return error(Status.QUERY_TASK_INSTANCE_LOG_ERROR.getCode(), Status.QUERY_TASK_INSTANCE_LOG_ERROR.getMsg()); + } + } + + + /** + * download log file + * + * @param loginUser + * @param taskInstanceId + */ + @ApiOperation(value = "downloadTaskLog", notes= "DOWNLOAD_TASK_INSTANCE_LOG_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "taskInstId", value = "TASK_ID",dataType = "Int", example = "100") + }) + @GetMapping(value = "/download-log") + @ResponseBody + public ResponseEntity downloadTaskLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "taskInstId") int taskInstanceId) { + try { + byte[] logBytes = loggerService.getLogBytes(taskInstanceId); + return ResponseEntity + .ok() + .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + System.currentTimeMillis() + ".queryLog" + "\"") + .body(logBytes); + } catch (Exception e) { + logger.error(Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR.getMsg(), e); + return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR.getMsg()); + } + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java new file mode 100644 index 0000000000..81911c62a1 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.SessionService; +import org.apache.dolphinscheduler.api.service.UsersService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.*; +import org.apache.commons.httpclient.HttpStatus; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import static org.apache.dolphinscheduler.api.enums.Status.*; + +/** + * user login controller + * + * swagger bootstrap ui docs refer : https://doc.xiaominfo.com/guide/enh-func.html + */ +@Api(tags = "LOGIN_TAG", position = 1) +@RestController +@RequestMapping("") +public class LoginController extends BaseController { + + private static final Logger logger = 
LoggerFactory.getLogger(LoginController.class); + + + @Autowired + private SessionService sessionService; + + @Autowired + private UsersService userService; + + + /** + * login + * + * @param userName + * @param userPassword + * @param request + * @param response + * @return + */ + @ApiOperation(value = "login", notes= "LOGIN_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", required = true, dataType ="String") + }) + @PostMapping(value = "/login") + public Result login(@RequestParam(value = "userName") String userName, + @RequestParam(value = "userPassword") String userPassword, + HttpServletRequest request, + HttpServletResponse response) { + + try { + logger.info("login user name: {} ", userName); + + //user name check + if (StringUtils.isEmpty(userName)) { + return error(Status.USER_NAME_NULL.getCode(), + Status.USER_NAME_NULL.getMsg()); + } + + // user ip check + String ip = getClientIpAddress(request); + if (StringUtils.isEmpty(ip)) { + return error(IP_IS_EMPTY.getCode(), IP_IS_EMPTY.getMsg()); + } + + // verify username and password + User user = userService.queryUser(userName, userPassword); + + if (user == null) { + return error(Status.USER_NAME_PASSWD_ERROR.getCode(),Status.USER_NAME_PASSWD_ERROR.getMsg() + ); + } + + // create session + String sessionId = sessionService.createSession(user, ip); + + if (sessionId == null) { + return error(Status.LOGIN_SESSION_FAILED.getCode(), + Status.LOGIN_SESSION_FAILED.getMsg() + ); + } + + response.setStatus(HttpStatus.SC_OK); + response.addCookie(new Cookie(Constants.SESSION_ID, sessionId)); + + logger.info("sessionId : {}" , sessionId); + return success(LOGIN_SUCCESS.getMsg(), sessionId); + } catch (Exception e) { + logger.error(USER_LOGIN_FAILURE.getMsg(),e); + return error(USER_LOGIN_FAILURE.getCode(), USER_LOGIN_FAILURE.getMsg()); + } + } + + /** + * sign out + * + * 
@param loginUser + * @return + */ + @ApiOperation(value = "signOut", notes = "SIGNOUT_NOTES") + @PostMapping(value = "/signOut") + public Result signOut(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + HttpServletRequest request) { + + try { + logger.info("login user:{} sign out", loginUser.getUserName()); + String ip = getClientIpAddress(request); + sessionService.signOut(ip, loginUser); + //clear session + request.removeAttribute(Constants.SESSION_USER); + return success(); + } catch (Exception e) { + logger.error(SIGN_OUT_ERROR.getMsg(),e); + return error(SIGN_OUT_ERROR.getCode(), SIGN_OUT_ERROR.getMsg()); + } + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java new file mode 100644 index 0000000000..396675b97d --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.service.MonitorService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + +import static org.apache.dolphinscheduler.api.enums.Status.*; +/** + * monitor controller + */ +@Api(tags = "MONITOR_TAG", position = 1) +@RestController +@RequestMapping("/monitor") +public class MonitorController extends BaseController{ + + private static final Logger logger = LoggerFactory.getLogger(MonitorController.class); + + @Autowired + private MonitorService monitorService; + + /** + * master list + * @param loginUser + * @return + */ + @ApiOperation(value = "listMaster", notes= "MASTER_LIST_NOTES") + @GetMapping(value = "/master/list") + @ResponseStatus(HttpStatus.OK) + public Result listMaster(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { + logger.info("login user: {}, query all master", loginUser.getUserName()); + try{ + logger.info("list master, user:{}", loginUser.getUserName()); + Map result = monitorService.queryMaster(loginUser); + return returnDataList(result); + }catch (Exception e){ + logger.error(LIST_MASTERS_ERROR.getMsg(),e); + return error(LIST_MASTERS_ERROR.getCode(), + LIST_MASTERS_ERROR.getMsg()); + } + } + + /** + * worker list + * @param loginUser + * @return + */ + @ApiOperation(value = "listWorker", notes= "WORKER_LIST_NOTES") + @GetMapping(value = "/worker/list") + @ResponseStatus(HttpStatus.OK) + public Result listWorker(@ApiIgnore 
@RequestAttribute(value = Constants.SESSION_USER) User loginUser) { + logger.info("login user: {}, query all workers", loginUser.getUserName()); + try{ + Map result = monitorService.queryWorker(loginUser); + return returnDataList(result); + }catch (Exception e){ + logger.error(LIST_WORKERS_ERROR.getMsg(),e); + return error(LIST_WORKERS_ERROR.getCode(), + LIST_WORKERS_ERROR.getMsg()); + } + } + + /** + * query database state + * @param loginUser + * @return + */ + @ApiOperation(value = "queryDatabaseState", notes= "QUERY_DATABASE_STATE_NOTES") + @GetMapping(value = "/database") + @ResponseStatus(HttpStatus.OK) + public Result queryDatabaseState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { + logger.info("login user: {}, query database state", loginUser.getUserName()); + try{ + + Map result = monitorService.queryDatabaseState(loginUser); + return returnDataList(result); + }catch (Exception e){ + logger.error(QUERY_DATABASE_STATE_ERROR.getMsg(),e); + return error(QUERY_DATABASE_STATE_ERROR.getCode(), + QUERY_DATABASE_STATE_ERROR.getMsg()); + } + } + + /** + * query zookeeper state + * @param loginUser + * @return + */ + @ApiOperation(value = "queryZookeeperState", notes= "QUERY_ZOOKEEPER_STATE_NOTES") + @GetMapping(value = "/zookeeper/list") + @ResponseStatus(HttpStatus.OK) + public Result queryZookeeperState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { + logger.info("login user: {}, query zookeeper state", loginUser.getUserName()); + try{ + Map result = monitorService.queryZookeeperState(loginUser); + return returnDataList(result); + }catch (Exception e){ + logger.error(QUERY_ZOOKEEPER_STATE_ERROR.getMsg(),e); + return error(QUERY_ZOOKEEPER_STATE_ERROR.getCode(), + QUERY_ZOOKEEPER_STATE_ERROR.getMsg()); + } + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java new file mode 100644 index 0000000000..5f3ab86d02 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java @@ -0,0 +1,491 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import javax.servlet.http.HttpServletResponse; +import java.util.Map; + + +/** + * process definition controller + */ +@Api(tags = "PROCESS_DEFINITION_TAG", position = 2) +@RestController +@RequestMapping("projects/{projectName}/process") +public class ProcessDefinitionController extends BaseController{ + + private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionController.class); + + @Autowired + private ProcessDefinitionService processDefinitionService; + + /** + * create process definition + * + * @param loginUser + * @param projectName + * @param name + * @param json process definition json + * @param desc + * @return + */ + @ApiOperation(value = "save", notes= "CREATE_PROCESS_DEFINITION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), + @ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type ="String"), + @ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type ="String"), + @ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type ="String"), + @ApiImplicitParam(name = "desc", value = 
"PROCESS_DEFINITION_DESC", required = false, type ="String"), + }) + @PostMapping(value = "/save") + @ResponseStatus(HttpStatus.CREATED) + public Result createProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "name", required = true) String name, + @RequestParam(value = "processDefinitionJson", required = true) String json, + @RequestParam(value = "locations", required = true) String locations, + @RequestParam(value = "connects", required = true) String connects, + @RequestParam(value = "desc", required = false) String desc) { + + try { + logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " + + "process_definition_json: {}, desc: {} locations:{}, connects:{}", + loginUser.getUserName(), projectName, name, json, desc, locations, connects); + Map result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json, + desc, locations, connects); + return returnDataList(result); + } catch (Exception e) { + logger.error(Status.CREATE_PROCESS_DEFINITION.getMsg(), e); + return error(Status.CREATE_PROCESS_DEFINITION.getCode(), Status.CREATE_PROCESS_DEFINITION.getMsg()); + } + } + + /** + * verify process definition name unique + * + * @param loginUser + * @param projectName + * @param name + * @return + */ + @ApiOperation(value = "verify-name", notes = "VERIFY_PROCCESS_DEFINITION_NAME_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String") + }) + @GetMapping(value = "/verify-name") + @ResponseStatus(HttpStatus.OK) + public Result verifyProccessDefinitionName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, + 
@RequestParam(value = "name", required = true) String name){ + try { + logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}", + loginUser.getUserName(), projectName, name); + Map result = processDefinitionService.verifyProccessDefinitionName(loginUser, projectName, name); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getMsg(),e); + return error(Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getCode(), Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getMsg()); + } + } + + /** + * update process definition + * + * @param loginUser + * @param projectName + * @param name + * @param id + * @param processDefinitionJson + * @param desc + * @return + */ + @ApiOperation(value = "updateProccessDefinition", notes= "UPDATE_PROCCESS_DEFINITION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), + @ApiImplicitParam(name = "id", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type ="String"), + @ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type ="String"), + @ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type ="String"), + @ApiImplicitParam(name = "desc", value = "PROCESS_DEFINITION_DESC", required = false, type ="String"), + }) + @PostMapping(value = "/update") + @ResponseStatus(HttpStatus.OK) + public Result updateProccessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, + @RequestParam(value = "name", required = true) String name, + @RequestParam(value = "id", required = true) int id, 
+ @RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson, + @RequestParam(value = "locations", required = false) String locations, + @RequestParam(value = "connects", required = false) String connects, + @RequestParam(value = "desc", required = false) String desc) { + + try { + logger.info("login user {}, update process define, project name: {}, process define name: {}, " + + "process_definition_json: {}, desc: {}, locations:{}, connects:{}", + loginUser.getUserName(), projectName, name, processDefinitionJson,desc, locations, connects); + Map result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name, + processDefinitionJson, desc, locations, connects); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.UPDATE_PROCESS_DEFINITION_ERROR.getMsg(),e); + return error(Status.UPDATE_PROCESS_DEFINITION_ERROR.getCode(), Status.UPDATE_PROCESS_DEFINITION_ERROR.getMsg()); + } + } + + /** + * release process definition + * + * @param loginUser + * @param projectName + * @param processId + * @param releaseState + * @return + */ + @ApiOperation(value = "releaseProccessDefinition", notes= "RELEASE_PROCCESS_DEFINITION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), + @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "releaseState", value = "PROCESS_DEFINITION_CONNECTS", required = true, dataType = "Int", example = "100"), + }) + @PostMapping(value = "/release") + @ResponseStatus(HttpStatus.OK) + public Result releaseProccessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, + @RequestParam(value = "processId", required = true) int processId, + 
@RequestParam(value = "releaseState", required = true) int releaseState) { + + try { + logger.info("login user {}, release process definition, project name: {}, release state: {}", + loginUser.getUserName(), projectName, releaseState); + Map result = processDefinitionService.releaseProcessDefinition(loginUser, projectName, processId, releaseState); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.RELEASE_PROCESS_DEFINITION_ERROR.getMsg(),e); + return error(Status.RELEASE_PROCESS_DEFINITION_ERROR.getCode(), Status.RELEASE_PROCESS_DEFINITION_ERROR.getMsg()); + } + } + + + /** + * query datail of process definition + * + * @param loginUser + * @param projectName + * @param processId + * @return + */ + @ApiOperation(value = "queryProccessDefinitionById", notes= "QUERY_PROCCESS_DEFINITION_BY_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") + }) + @GetMapping(value="/select-by-id") + @ResponseStatus(HttpStatus.OK) + public Result queryProccessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, + @RequestParam("processId") Integer processId + ){ + try{ + logger.info("query datail of process definition, login user:{}, project name:{}, process definition id:{}", + loginUser.getUserName(), projectName, processId); + Map result = processDefinitionService.queryProccessDefinitionById(loginUser, projectName, processId); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getMsg(),e); + return error(Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getCode(), Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getMsg()); + } + } + + + /** + * query proccess definition list + * + * @param loginUser + * @param projectName + * @return + */ 
+ @ApiOperation(value = "queryProccessDefinitionList", notes= "QUERY_PROCCESS_DEFINITION_LIST_NOTES") + @GetMapping(value="/list") + @ResponseStatus(HttpStatus.OK) + public Result queryProccessDefinitionList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName + ){ + try{ + logger.info("query proccess definition list, login user:{}, project name:{}", + loginUser.getUserName(), projectName); + Map result = processDefinitionService.queryProccessDefinitionList(loginUser, projectName); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg(),e); + return error(Status.QUERY_PROCCESS_DEFINITION_LIST.getCode(), Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg()); + } + } + + /** + * query proccess definition list paging + * @param loginUser + * @param projectName + * @param pageNo + * @param pageSize + * @return + */ + @ApiOperation(value = "queryProcessDefinitionListPaging", notes= "QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", required = false, type = "String"), + @ApiImplicitParam(name = "userId", value = "USER_ID", required = false, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "100") + }) + @GetMapping(value="/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryProcessDefinitionListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, 
+ @RequestParam(value = "userId", required = false, defaultValue = "0") Integer userId, + @RequestParam("pageSize") Integer pageSize){ + try{ + logger.info("query proccess definition list paging, login user:{}, project name:{}", loginUser.getUserName(), projectName); + Map result = checkPageParams(pageNo, pageSize); + if(result.get(Constants.STATUS) != Status.SUCCESS){ + return returnDataListPaging(result); + } + searchVal = ParameterUtils.handleEscapes(searchVal); + result = processDefinitionService.queryProcessDefinitionListPaging(loginUser, projectName, searchVal, pageNo, pageSize, userId); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(Status.QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getMsg(),e); + return error(Status.QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getCode(), Status.QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getMsg()); + } + } + + + /** + * encapsulation treeview structure + * + * @param loginUser + * @param projectName + * @param id + * @return + */ + @ApiOperation(value = "viewTree", notes= "VIEW_TREE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType = "Int", example = "100") + }) + @GetMapping(value="/view-tree") + @ResponseStatus(HttpStatus.OK) + public Result viewTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, + @RequestParam("processId") Integer id, + @RequestParam("limit") Integer limit){ + try{ + Map result = processDefinitionService.viewTree(id, limit); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getMsg(),e); + return error(Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getCode(), 
Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getMsg()); + } + } + + + /** + * + * get tasks list by process definition id + * + * + * @param loginUser + * @param projectName + * @param processDefinitionId + * @return + */ + @ApiOperation(value = "getNodeListByDefinitionId", notes= "GET_NODE_LIST_BY_DEFINITION_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") + }) + @GetMapping(value="gen-task-list") + @ResponseStatus(HttpStatus.OK) + public Result getNodeListByDefinitionId( + @ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, + @RequestParam("processDefinitionId") Integer processDefinitionId){ + try { + logger.info("query task node name list by definitionId, login user:{}, project name:{}, id : {}", + loginUser.getUserName(), projectName, processDefinitionId); + Map result = processDefinitionService.getTaskNodeListByDefinitionId(processDefinitionId); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg(), e); + return error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getCode(), Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg()); + } + } + + /** + * + * get tasks list by process definition id + * + * + * @param loginUser + * @param projectName + * @param processDefinitionIdList + * @return + */ + @ApiOperation(value = "getNodeListByDefinitionIdList", notes= "GET_NODE_LIST_BY_DEFINITION_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionIdList", value = "PROCESS_DEFINITION_ID_LIST", required = true, type = "String") + }) + @GetMapping(value="get-task-list") + @ResponseStatus(HttpStatus.OK) + public Result getNodeListByDefinitionIdList( + @ApiIgnore @RequestAttribute(value = 
Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, + @RequestParam("processDefinitionIdList") String processDefinitionIdList){ + + try { + logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}", + loginUser.getUserName(), projectName, processDefinitionIdList); + Map result = processDefinitionService.getTaskNodeListByDefinitionIdList(processDefinitionIdList); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg(), e); + return error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getCode(), Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg()); + } + } + + /** + * delete process definition by id + * + * @param loginUser + * @param projectName + * @param processDefinitionId + * @return + */ + @ApiOperation(value = "deleteProcessDefinitionById", notes= "DELETE_PROCESS_DEFINITION_BY_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", dataType = "Int", example = "100") + }) + @GetMapping(value="/delete") + @ResponseStatus(HttpStatus.OK) + public Result deleteProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processDefinitionId") Integer processDefinitionId + ){ + try{ + logger.info("delete process definition by id, login user:{}, project name:{}, process definition id:{}", + loginUser.getUserName(), projectName, processDefinitionId); + Map result = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getMsg(),e); + return 
error(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getCode(), Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getMsg()); + } + } + + /** + * batch delete process definition by ids + * + * @param loginUser + * @param projectName + * @param processDefinitionIds + * @return + */ + @ApiOperation(value = "batchDeleteProcessDefinitionByIds", notes= "BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", type = "String") + }) + @GetMapping(value="/batch-delete") + @ResponseStatus(HttpStatus.OK) + public Result batchDeleteProcessDefinitionByIds(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processDefinitionIds") String processDefinitionIds + ){ + try{ + logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}", + loginUser.getUserName(), projectName, processDefinitionIds); + Map result = processDefinitionService.batchDeleteProcessDefinitionByIds(loginUser, projectName, processDefinitionIds); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(),e); + return error(Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getCode(), Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getMsg()); + } + } + + /** + * export process definition by id + * + * @param loginUser + * @param projectName + * @param processDefinitionId + * @return + */ + @ApiOperation(value = "exportProcessDefinitionById", notes= "EXPORT_PROCCESS_DEFINITION_BY_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") + }) + @GetMapping(value="/export") + @ResponseBody + public void exportProcessDefinitionById(@ApiIgnore @RequestAttribute(value = 
Constants.SESSION_USER) User loginUser, + @PathVariable String projectName, + @RequestParam("processDefinitionId") Integer processDefinitionId, + HttpServletResponse response){ + try{ + logger.info("export process definition by id, login user:{}, project name:{}, process definition id:{}", + loginUser.getUserName(), projectName, processDefinitionId); + processDefinitionService.exportProcessDefinitionById(loginUser, projectName, processDefinitionId,response); + }catch (Exception e){ + logger.error(Status.EXPORT_PROCESS_DEFINE_BY_ID_ERROR.getMsg(),e); + } + } + + + + /** + * query proccess definition all by project id + * + * @param loginUser + * @return + */ + @ApiOperation(value = "queryProccessDefinitionAllByProjectId", notes= "QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES") + @GetMapping(value="/queryProccessDefinitionAllByProjectId") + @ResponseStatus(HttpStatus.OK) + public Result queryProccessDefinitionAllByProjectId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("projectId") Integer projectId){ + try{ + logger.info("query proccess definition list, login user:{}, project id:{}", + loginUser.getUserName(),projectId); + Map result = processDefinitionService.queryProccessDefinitionAllByProjectId(projectId); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg(),e); + return error(Status.QUERY_PROCCESS_DEFINITION_LIST.getCode(), Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg()); + } + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java new file mode 100644 index 0000000000..04233985d8 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java @@ -0,0 +1,367 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ProcessInstanceService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.queue.ITaskQueue; +import org.apache.dolphinscheduler.common.queue.TaskQueueFactory; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + +import static org.apache.dolphinscheduler.api.enums.Status.*; + +/** + * process instance controller + */ +@Api(tags = "PROCESS_INSTANCE_TAG", position = 10) +@RestController +@RequestMapping("projects/{projectName}/instance") 
+public class ProcessInstanceController extends BaseController{ + + private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceController.class); + + + @Autowired + ProcessInstanceService processInstanceService; + + /** + * query process instance list paging + * + * @param loginUser + * @param projectName + * @param pageNo + * @param pageSize + * @return + */ + @ApiOperation(value = "queryProcessInstanceList", notes= "QUERY_PROCESS_INSTANCE_LIST_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"), + @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type ="ExecutionStatus"), + @ApiImplicitParam(name = "host", value = "HOST", type ="String"), + @ApiImplicitParam(name = "startDate", value = "START_DATE", type ="String"), + @ApiImplicitParam(name = "endDate", value = "END_DATE", type ="String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100") + }) + @GetMapping(value="list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryProcessInstanceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "processDefinitionId", required = false, defaultValue = "0") Integer processDefinitionId, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam(value = "stateType", required = false) ExecutionStatus stateType, + @RequestParam(value = "host", required = false) String host, + @RequestParam(value = "startDate", required = false) String startTime, + @RequestParam(value = "endDate", required = false) String endTime, + @RequestParam("pageNo") 
Integer pageNo, + @RequestParam("pageSize") Integer pageSize){ + try{ + logger.info("query all process instance list, login user:{},project name:{}, define id:{}," + + "search value:{},state type:{},host:{},start time:{}, end time:{},page number:{}, page size:{}", + loginUser.getUserName(), projectName, processDefinitionId, searchVal, stateType,host, + startTime, endTime, pageNo, pageSize); + searchVal = ParameterUtils.handleEscapes(searchVal); + Map result = processInstanceService.queryProcessInstanceList( + loginUser, projectName, processDefinitionId, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg(),e); + return error(Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg()); + } + } + + /** + * query task list by process instance id + * + * @param loginUser + * @param projectName + * @param processInstanceId + * @return + */ + @ApiOperation(value = "queryTaskListByProcessId", notes= "QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") + }) + @GetMapping(value="/task-list-by-process-id") + @ResponseStatus(HttpStatus.OK) + public Result queryTaskListByProcessId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processInstanceId") Integer processInstanceId + ) { + try{ + logger.info("query task instance list by process instance id, login user:{}, project name:{}, process instance id:{}", + loginUser.getUserName(), projectName, processInstanceId); + Map result = processInstanceService.queryTaskListByProcessId(loginUser, projectName, processInstanceId); + return returnDataList(result); 
+ }catch (Exception e){ + logger.error(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getMsg(),e); + return error(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getCode(), QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getMsg()); + } + } + + /** + * update process instance + * + * @param loginUser + * @param projectName + * @param processInstanceJson + * @param processInstanceId + * @param scheduleTime + * @param syncDefine + * @param flag + * @return + */ + @ApiOperation(value = "updateProcessInstance", notes= "UPDATE_PROCESS_INSTANCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processInstanceJson", value = "PROCESS_INSTANCE_JSON", type = "String"), + @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", type = "String"), + @ApiImplicitParam(name = "syncDefine", value = "SYNC_DEFINE", type = "Boolean"), + @ApiImplicitParam(name = "locations", value = "PROCESS_INSTANCE_LOCATIONS", type = "String"), + @ApiImplicitParam(name = "connects", value = "PROCESS_INSTANCE_CONNECTS", type = "String"), + @ApiImplicitParam(name = "flag", value = "RECOVERY_PROCESS_INSTANCE_FLAG", type = "Flag"), + }) + @PostMapping(value="/update") + @ResponseStatus(HttpStatus.OK) + public Result updateProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam( value = "processInstanceJson", required = false) String processInstanceJson, + @RequestParam( value = "processInstanceId") Integer processInstanceId, + @RequestParam( value = "scheduleTime", required = false) String scheduleTime, + @RequestParam( value = "syncDefine", required = true) Boolean syncDefine, + @RequestParam(value = "locations", required = false) String locations, + @RequestParam(value = "connects", required = false) String connects, + 
@RequestParam( value = "flag", required = false) Flag flag + ){ + try{ + logger.info("updateProcessInstance process instance, login user:{}, project name:{}, process instance json:{}," + + "process instance id:{}, schedule time:{}, sync define:{}, flag:{}, locations:{}, connects:{}", + loginUser.getUserName(), projectName, processInstanceJson, processInstanceId, scheduleTime, + syncDefine, flag, locations, connects); + Map result = processInstanceService.updateProcessInstance(loginUser, projectName, + processInstanceId, processInstanceJson, scheduleTime, syncDefine, flag, locations, connects); + return returnDataList(result); + }catch (Exception e){ + logger.error(UPDATE_PROCESS_INSTANCE_ERROR.getMsg(),e); + return error(Status.UPDATE_PROCESS_INSTANCE_ERROR.getCode(), Status.UPDATE_PROCESS_INSTANCE_ERROR.getMsg()); + } + } + + /** + * query process instance by id + * + * @param loginUser + * @param projectName + * @param processInstanceId + * @return + */ + @ApiOperation(value = "queryProcessInstanceById", notes= "QUERY_PROCESS_INSTANCE_BY_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") + }) + @GetMapping(value="/select-by-id") + @ResponseStatus(HttpStatus.OK) + public Result queryProcessInstanceById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processInstanceId") Integer processInstanceId + ){ + try{ + logger.info("query process instance detail by id, login user:{},project name:{}, process instance id:{}", + loginUser.getUserName(), projectName, processInstanceId); + Map result = processInstanceService.queryProcessInstanceById(loginUser, projectName, processInstanceId); + return returnDataList(result); + }catch (Exception e){ + logger.error(QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e); + return 
error(Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getMsg()); + } + } + + /** + * delete process instance by id, at the same time, + * delete task instance and their mapping relation data + * + * @param loginUser + * @param projectName + * @param processInstanceId + * @return + */ + @ApiOperation(value = "deleteProcessInstanceById", notes= "DELETE_PROCESS_INSTANCE_BY_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") + }) + @GetMapping(value="/delete") + @ResponseStatus(HttpStatus.OK) + public Result deleteProcessInstanceById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processInstanceId") Integer processInstanceId + ){ + try{ + logger.info("delete process instance by id, login user:{}, project name:{}, process instance id:{}", + loginUser.getUserName(), projectName, processInstanceId); + // task queue + ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance(); + Map result = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId,tasksQueue); + return returnDataList(result); + }catch (Exception e){ + logger.error(DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e); + return error(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg()); + } + } + + /** + * query sub process instance detail info by task id + * + * @param loginUser + * @param projectName + * @param taskId + * @return + */ + @ApiOperation(value = "querySubProcessInstanceByTaskId", notes= "QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "taskId", value = "TASK_ID", dataType = "Int", example = "100") + }) + @GetMapping(value="/select-sub-process") + 
@ResponseStatus(HttpStatus.OK) + public Result querySubProcessInstanceByTaskId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("taskId") Integer taskId){ + try{ + Map result = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, taskId); + return returnDataList(result); + }catch (Exception e){ + logger.error(QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getMsg(),e); + return error(Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getCode(), Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getMsg()); + } + } + + /** + * query parent process instance detail info by sub process instance id + * + * @param loginUser + * @param projectName + * @param subId + * @return + */ + @ApiOperation(value = "queryParentInstanceBySubId", notes= "QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "subId", value = "SUB_PROCESS_INSTANCE_ID", dataType = "Int", example = "100") + }) + @GetMapping(value="/select-parent-process") + @ResponseStatus(HttpStatus.OK) + public Result queryParentInstanceBySubId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("subId") Integer subId){ + try{ + Map result = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, subId); + return returnDataList(result); + }catch (Exception e){ + logger.error(QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getMsg(),e); + return error(Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getCode(), Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getMsg()); + } + } + + /** + * query 
process instance global variables and local variables + * + * @param loginUser + * @param processInstanceId + * @return + */ + @ApiOperation(value = "viewVariables", notes= "QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") + }) + @GetMapping(value="/view-variables") + @ResponseStatus(HttpStatus.OK) + public Result viewVariables(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser + , @RequestParam("processInstanceId") Integer processInstanceId){ + try{ + Map result = processInstanceService.viewVariables(processInstanceId); + return returnDataList(result); + }catch (Exception e){ + logger.error(QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getMsg(),e); + return error(Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getMsg()); + } + } + + /** + * encapsulation gantt structure + * + * @param loginUser + * @param projectName + * @param processInstanceId + * @return + */ + @ApiOperation(value = "vieGanttTree", notes= "VIEW_GANTT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") + }) + @GetMapping(value="/view-gantt") + @ResponseStatus(HttpStatus.OK) + public Result viewTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processInstanceId") Integer processInstanceId){ + try{ + Map result = processInstanceService.viewGantt(processInstanceId); + return returnDataList(result); + }catch (Exception e){ + logger.error(ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg(),e); + return 
error(Status.ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getCode(),ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg()); + } + } + + /** + * batch delete process instance by ids, at the same time, + * delete task instance and their mapping relation data + * + * @param loginUser + * @param projectName + * @param processInstanceIds + * @return + */ + @GetMapping(value="/batch-delete") + @ResponseStatus(HttpStatus.OK) + public Result batchDeleteProcessInstanceByIds(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @PathVariable String projectName, + @RequestParam("processInstanceIds") String processInstanceIds + ){ + try{ + logger.info("delete process instance by ids, login user:{}, project name:{}, process instance ids :{}", + loginUser.getUserName(), projectName, processInstanceIds); + Map result = processInstanceService.batchDeleteProcessInstanceByIds(loginUser, projectName, processInstanceIds); + return returnDataList(result); + }catch (Exception e){ + logger.error(BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getMsg(),e); + return error(Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getCode(), Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getMsg()); + } + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java new file mode 100644 index 0000000000..d9927bbc2b --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java @@ -0,0 +1,304 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.multipart.MultipartFile; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + +import static org.apache.dolphinscheduler.api.enums.Status.*; + +/** + * project controller + */ +@Api(tags = "PROJECT_TAG", position = 1) +@RestController +@RequestMapping("projects") +public class ProjectController extends BaseController { + + private static final Logger logger = LoggerFactory.getLogger(ProjectController.class); + + @Autowired + private ProjectService projectService; + 
+ @Autowired + private ProcessDefinitionService processDefinitionService; + + /** + * create project + * + * @param loginUser + * @param projectName + * @param desc + * @return returns an error if it exists + */ + @ApiOperation(value = "createProject", notes= "CREATE_PROJECT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectName", value = "PROJECT_NAME", dataType ="String"), + @ApiImplicitParam(name = "desc", value = "PROJECT_DESC", dataType = "String") + }) + @PostMapping(value = "/create") + @ResponseStatus(HttpStatus.CREATED) + public Result createProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("projectName") String projectName, + @RequestParam(value = "desc", required = false) String desc) { + + try { + logger.info("login user {}, create project name: {}, desc: {}", loginUser.getUserName(), projectName, desc); + Map result = projectService.createProject(loginUser, projectName, desc); + return returnDataList(result); + } catch (Exception e) { + logger.error(CREATE_PROJECT_ERROR.getMsg(), e); + return error(CREATE_PROJECT_ERROR.getCode(), CREATE_PROJECT_ERROR.getMsg()); + } + } + + /** + * updateProcessInstance project + * + * @param loginUser + * @param projectId + * @param projectName + * @param desc + * @return + */ + @ApiOperation(value = "updateProject", notes= "UPDATE_PROJECT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100"), + @ApiImplicitParam(name = "projectName",value = "PROJECT_NAME",dataType = "String"), + @ApiImplicitParam(name = "desc", value = "PROJECT_DESC", dataType = "String") + }) + @PostMapping(value = "/update") + @ResponseStatus(HttpStatus.OK) + public Result updateProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("projectId") Integer projectId, + @RequestParam("projectName") String projectName, + @RequestParam(value = "desc", required = false) 
String desc) { + try { + logger.info("login user {} , updateProcessInstance project name: {}, desc: {}", loginUser.getUserName(), projectName, desc); + Map result = projectService.update(loginUser, projectId, projectName, desc); + return returnDataList(result); + } catch (Exception e) { + logger.error(UPDATE_PROJECT_ERROR.getMsg(), e); + return error(UPDATE_PROJECT_ERROR.getCode(), UPDATE_PROJECT_ERROR.getMsg()); + } + } + + /** + * query project details by id + * + * @param loginUser + * @param projectId + * @return + */ + @ApiOperation(value = "queryProjectById", notes= "QUERY_PROJECT_BY_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + }) + @GetMapping(value = "/query-by-id") + @ResponseStatus(HttpStatus.OK) + public Result queryProjectById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("projectId") Integer projectId) { + logger.info("login user {}, query project by id: {}", loginUser.getUserName(), projectId); + + try { + Map result = projectService.queryById(projectId); + return returnDataList(result); + } catch (Exception e) { + logger.error(QUERY_PROJECT_DETAILS_BY_ID_ERROR.getMsg(), e); + return error(QUERY_PROJECT_DETAILS_BY_ID_ERROR.getCode(), QUERY_PROJECT_DETAILS_BY_ID_ERROR.getMsg()); + } + } + + /** + * query project list paging + * + * @param loginUser + * @param searchVal + * @param pageSize + * @param pageNo + * @return + */ + @ApiOperation(value = "queryProjectListPaging", notes= "QUERY_PROJECT_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int", example = "20"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType ="Int", example = "1") + }) + @GetMapping(value = "/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result 
queryProjectListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize, + @RequestParam("pageNo") Integer pageNo + ) { + + try { + logger.info("login user {}, query project list paging", loginUser.getUserName()); + searchVal = ParameterUtils.handleEscapes(searchVal); + Map result = projectService.queryProjectListPaging(loginUser, pageSize, pageNo, searchVal); + return returnDataListPaging(result); + } catch (Exception e) { + logger.error(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg(), e); + return error(Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getCode(), Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg()); + } + } + + /** + * delete project by id + * + * @param loginUser + * @param projectId + * @return + */ + @ApiOperation(value = "deleteProjectById", notes= "DELETE_PROJECT_BY_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + }) + @GetMapping(value = "/delete") + @ResponseStatus(HttpStatus.OK) + public Result deleteProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("projectId") Integer projectId + ) { + + try { + logger.info("login user {}, delete project: {}.", loginUser.getUserName(), projectId); + Map result = projectService.deleteProject(loginUser, projectId); + return returnDataList(result); + } catch (Exception e) { + logger.error(DELETE_PROJECT_ERROR.getMsg(), e); + return error(DELETE_PROJECT_ERROR.getCode(), DELETE_PROJECT_ERROR.getMsg()); + } + } + + /** + * query unauthorized project + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "queryUnauthorizedProject", notes= "QUERY_UNAUTHORIZED_PROJECT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", dataType ="Int", example = "100") + }) + 
@GetMapping(value = "/unauth-project") + @ResponseStatus(HttpStatus.OK) + public Result queryUnauthorizedProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("userId") Integer userId) { + try { + logger.info("login user {}, query unauthorized project by user id: {}.", loginUser.getUserName(), userId); + Map result = projectService.queryUnauthorizedProject(loginUser, userId); + return returnDataList(result); + } catch (Exception e) { + logger.error(QUERY_UNAUTHORIZED_PROJECT_ERROR.getMsg(), e); + return error(QUERY_UNAUTHORIZED_PROJECT_ERROR.getCode(), QUERY_UNAUTHORIZED_PROJECT_ERROR.getMsg()); + } + } + + + /** + * query authorized project + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "queryAuthorizedProject", notes= "QUERY_AUTHORIZED_PROJECT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", dataType ="Int", example = "100") + }) + @GetMapping(value = "/authed-project") + @ResponseStatus(HttpStatus.OK) + public Result queryAuthorizedProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("userId") Integer userId) { + try { + logger.info("login user {}, query authorized project by user id: {}.", loginUser.getUserName(), userId); + Map result = projectService.queryAuthorizedProject(loginUser, userId); + return returnDataList(result); + } catch (Exception e) { + logger.error(QUERY_AUTHORIZED_PROJECT.getMsg(), e); + return error(QUERY_AUTHORIZED_PROJECT.getCode(), QUERY_AUTHORIZED_PROJECT.getMsg()); + } + } + + /** + * import process definition + * + * @param loginUser + * @param file + * @return + */ + @ApiOperation(value = "importProcessDefinition", notes= "IMPORT_PROCESS_DEFINITION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") + }) + @PostMapping(value="/importProcessDefinition") + public Result 
importProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("file") MultipartFile file){ + try{ + logger.info("import process definition by id, login user:{}", + loginUser.getUserName()); + Map result = processDefinitionService.importProcessDefinition(loginUser,file); + return returnDataList(result); + }catch (Exception e){ + logger.error(IMPORT_PROCESS_DEFINE_ERROR.getMsg(),e); + return error(IMPORT_PROCESS_DEFINE_ERROR.getCode(), IMPORT_PROCESS_DEFINE_ERROR.getMsg()); + } + } + + /** + * query all project list + * @param loginUser + * @return + */ + @ApiOperation(value = "queryAllProjectList", notes= "QUERY_ALL_PROJECT_LIST_NOTES") + @GetMapping(value = "/queryAllProjectList") + @ResponseStatus(HttpStatus.OK) + public Result queryAllProjectList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { + + try { + logger.info("login user {}, query all project list", loginUser.getUserName()); + Map result = projectService.queryAllProjectList(); + return returnDataList(result); + } catch (Exception e) { + logger.error(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg(), e); + return error(Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getCode(), Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg()); + } + } + + + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java new file mode 100644 index 0000000000..c06c80808f --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.QueueService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + + +/** + * queue controller + */ +@Api(tags = "QUEUE_TAG", position = 1) +@RestController +@RequestMapping("/queue") +public class QueueController extends BaseController{ + + private static final Logger logger = LoggerFactory.getLogger(QueueController.class); + + @Autowired + private QueueService queueService; + + + /** + * query queue list + * @param loginUser + * @return + */ + @ApiOperation(value = "queryList", notes= "QUERY_QUEUE_LIST_NOTES") + @GetMapping(value="/list") + @ResponseStatus(HttpStatus.OK) + public 
Result queryList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + try{ + logger.info("login user {}, query queue list", loginUser.getUserName()); + Map result = queueService.queryList(loginUser); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.QUERY_QUEUE_LIST_ERROR.getMsg(),e); + return error(Status.QUERY_QUEUE_LIST_ERROR.getCode(), Status.QUERY_QUEUE_LIST_ERROR.getMsg()); + } + } + + /** + * query queue list paging + * @param loginUser + * @return + */ + @ApiOperation(value = "queryQueueListPaging", notes= "QUERY_QUEUE_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + }) + @GetMapping(value="/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryQueueListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize){ + try{ + logger.info("login user {}, query queue list,search value:{}", loginUser.getUserName(),searchVal); + Map result = checkPageParams(pageNo, pageSize); + if(result.get(Constants.STATUS) != Status.SUCCESS){ + return returnDataListPaging(result); + } + + searchVal = ParameterUtils.handleEscapes(searchVal); + result = queueService.queryList(loginUser,searchVal,pageNo,pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(Status.QUERY_QUEUE_LIST_ERROR.getMsg(),e); + return error(Status.QUERY_QUEUE_LIST_ERROR.getCode(), Status.QUERY_QUEUE_LIST_ERROR.getMsg()); + } + } + + /** + * create queue + * + * @param loginUser + * @param queue + * @param queueName + * @return + */ + @ApiOperation(value = 
"createQueue", notes= "CREATE_QUEUE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME", required = true,dataType ="String"), + @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String") + }) + @PostMapping(value = "/create") + @ResponseStatus(HttpStatus.CREATED) + public Result createQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "queue") String queue, + @RequestParam(value = "queueName") String queueName) { + logger.info("login user {}, create queue, queue: {}, queueName: {}", + loginUser.getUserName(), queue, queueName); + try { + Map result = queueService.createQueue(loginUser,queue,queueName); + return returnDataList(result); + + }catch (Exception e){ + logger.error(Status.CREATE_QUEUE_ERROR.getMsg(),e); + return error(Status.CREATE_QUEUE_ERROR.getCode(), Status.CREATE_QUEUE_ERROR.getMsg()); + } + } + + /** + * update queue + * + * @param loginUser + * @param queue + * @param queueName + * @return + */ + @ApiOperation(value = "updateQueue", notes= "UPDATE_QUEUE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String") + }) + @PostMapping(value = "/update") + @ResponseStatus(HttpStatus.CREATED) + public Result updateQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int id, + @RequestParam(value = "queue") String queue, + @RequestParam(value = "queueName") String queueName) { + logger.info("login user {}, update queue, id: {}, queue: {}, queueName: {}", + loginUser.getUserName(), id,queue, queueName); + try { + Map result = queueService.updateQueue(loginUser,id,queue,queueName); + return 
returnDataList(result); + + }catch (Exception e){ + logger.error(Status.UPDATE_QUEUE_ERROR.getMsg(),e); + return error(Status.UPDATE_QUEUE_ERROR.getCode(), Status.UPDATE_QUEUE_ERROR.getMsg()); + } + } + + /** + * verify queue and queue name + * + * @param loginUser + * @param queue + * @param queueName + * @return + */ + @ApiOperation(value = "verifyQueue", notes= "VERIFY_QUEUE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String") + }) + @PostMapping(value = "/verify-queue") + @ResponseStatus(HttpStatus.OK) + public Result verifyQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value ="queue") String queue, + @RequestParam(value ="queueName") String queueName + ) { + + try{ + logger.info("login user {}, verfiy queue: {} queue name: {}", + loginUser.getUserName(),queue,queueName); + return queueService.verifyQueue(queue,queueName); + }catch (Exception e){ + logger.error(Status.VERIFY_QUEUE_ERROR.getMsg(),e); + return error(Status.VERIFY_QUEUE_ERROR.getCode(), Status.VERIFY_QUEUE_ERROR.getMsg()); + } + } + + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java new file mode 100644 index 0000000000..0070b50952 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java @@ -0,0 +1,714 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ResourcesService; +import org.apache.dolphinscheduler.api.service.UdfFuncService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.common.enums.UdfType; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.io.Resource; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.multipart.MultipartFile; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + +import static org.apache.dolphinscheduler.api.enums.Status.*; +/** + * resources controller + 
*/ +@Api(tags = "RESOURCES_TAG", position = 1) +@RestController +@RequestMapping("resources") +public class ResourcesController extends BaseController{ + + private static final Logger logger = LoggerFactory.getLogger(ResourcesController.class); + + + @Autowired + private ResourcesService resourceService; + @Autowired + private UdfFuncService udfFuncService; + + /** + * create resource + * + * @param loginUser + * @param alias + * @param desc + * @param file + */ + @ApiOperation(value = "createResource", notes= "CREATE_RESOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), + @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "desc", value = "RESOURCE_DESC", dataType ="String"), + @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") + }) + @PostMapping(value = "/create") + public Result createResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "type") ResourceType type, + @RequestParam(value ="name")String alias, + @RequestParam(value = "desc", required = false) String desc, + @RequestParam("file") MultipartFile file) { + try { + logger.info("login user {}, create resource, type: {}, resource alias: {}, desc: {}, file: {},{}", + loginUser.getUserName(),type, alias, desc, file.getName(), file.getOriginalFilename()); + return resourceService.createResource(loginUser,alias, desc,type ,file); + } catch (Exception e) { + logger.error(CREATE_RESOURCE_ERROR.getMsg(),e); + return error(CREATE_RESOURCE_ERROR.getCode(), CREATE_RESOURCE_ERROR.getMsg()); + } + } + + /** + * update resource + * + * @param loginUser + * @param alias + * @param desc + */ + @ApiOperation(value = "updateResource", notes= "UPDATE_RESOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = 
true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), + @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "des", value = "RESOURCE_DESC", dataType ="String"), + @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true,dataType = "MultipartFile") + }) + @PostMapping(value = "/update") + public Result updateResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value ="id") int resourceId, + @RequestParam(value = "type") ResourceType type, + @RequestParam(value ="name")String alias, + @RequestParam(value = "desc", required = false) String desc) { + try { + logger.info("login user {}, update resource, type: {}, resource alias: {}, desc: {}", + loginUser.getUserName(),type, alias, desc); + return resourceService.updateResource(loginUser,resourceId,alias, desc,type); + } catch (Exception e) { + logger.error(UPDATE_RESOURCE_ERROR.getMsg(),e); + return error(Status.UPDATE_RESOURCE_ERROR.getCode(), Status.UPDATE_RESOURCE_ERROR.getMsg()); + } + } + + /** + * query resources list + * + * @param loginUser + * @return + */ + @ApiOperation(value = "querytResourceList", notes= "QUERY_RESOURCE_LIST_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType") + }) + @GetMapping(value="/list") + @ResponseStatus(HttpStatus.OK) + public Result querytResourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value ="type") ResourceType type + ){ + try{ + logger.info("query resource list, login user:{}, resource type:{}", loginUser.getUserName(), type.toString()); + Map result = resourceService.queryResourceList(loginUser, type); + return returnDataList(result); + }catch (Exception e){ + logger.error(QUERY_RESOURCES_LIST_ERROR.getMsg(),e); + 
return error(Status.QUERY_RESOURCES_LIST_ERROR.getCode(), Status.QUERY_RESOURCES_LIST_ERROR.getMsg()); + } + } + + /** + * query resources list paging + * + * @param loginUser + * @param pageNo + * @param pageSize + * @return + */ + @ApiOperation(value = "querytResourceListPaging", notes= "QUERY_RESOURCE_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + }) + @GetMapping(value="/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result querytResourceListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value ="type") ResourceType type, + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize + ){ + try{ + logger.info("query resource list, login user:{}, resource type:{}, search value:{}", + loginUser.getUserName(), type.toString(), searchVal); + Map result = checkPageParams(pageNo, pageSize); + if(result.get(Constants.STATUS) != Status.SUCCESS){ + return returnDataListPaging(result); + } + + searchVal = ParameterUtils.handleEscapes(searchVal); + result = resourceService.queryResourceListPaging(loginUser,type,searchVal,pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(QUERY_RESOURCES_LIST_PAGING.getMsg(),e); + return error(Status.QUERY_RESOURCES_LIST_PAGING.getCode(), Status.QUERY_RESOURCES_LIST_PAGING.getMsg()); + } + } + + + /** + * delete resource + * + * @param loginUser + * @param resourceId + */ + @ApiOperation(value = "deleteResource", notes= "DELETE_RESOURCE_BY_ID_NOTES") + @ApiImplicitParams({ + 
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/delete") + @ResponseStatus(HttpStatus.OK) + public Result deleteResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value ="id") int resourceId + ) { + try{ + logger.info("login user {}, delete resource id: {}", + loginUser.getUserName(),resourceId); + return resourceService.delete(loginUser,resourceId); + }catch (Exception e){ + logger.error(DELETE_RESOURCE_ERROR.getMsg(),e); + return error(Status.DELETE_RESOURCE_ERROR.getCode(), Status.DELETE_RESOURCE_ERROR.getMsg()); + } + } + + + /** + * verify resource by alias and type + * + * @param loginUser + * @param alias + * @param type + * @return + */ + @ApiOperation(value = "verifyResourceName", notes= "VERIFY_RESOURCE_NAME_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), + @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType ="String") + }) + @GetMapping(value = "/verify-name") + @ResponseStatus(HttpStatus.OK) + public Result verifyResourceName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value ="name") String alias, + @RequestParam(value ="type") ResourceType type + ) { + try { + logger.info("login user {}, verfiy resource alias: {},resource type: {}", + loginUser.getUserName(), alias,type); + + return resourceService.verifyResourceName(alias,type,loginUser); + } catch (Exception e) { + logger.error(VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getMsg(), e); + return error(Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getCode(), Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getMsg()); + } + } + + /** + * view resource file online + * + * @param loginUser + * @param resourceId + */ + @ApiOperation(value = "viewResource", notes= "VIEW_RESOURCE_BY_ID_NOTES") + @ApiImplicitParams({ + 
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", required = true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/view") + public Result viewResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int resourceId, + @RequestParam(value = "skipLineNum") int skipLineNum, + @RequestParam(value = "limit") int limit + ) { + try{ + logger.info("login user {}, view resource : {}, skipLineNum {} , limit {}", + loginUser.getUserName(),resourceId,skipLineNum,limit); + + return resourceService.readResource(resourceId,skipLineNum,limit); + }catch (Exception e){ + logger.error(VIEW_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); + return error(Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); + } + } + + /** + * create resource file online + * + * @param loginUser + * @param type + * @param fileName + * @param fileSuffix + * @param desc + * @param content + * @return + */ + @ApiOperation(value = "onlineCreateResource", notes= "ONLINE_CREATE_RESOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), + @ApiImplicitParam(name = "fileName", value = "RESOURCE_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "suffix", value = "SUFFIX", required = true, dataType ="String"), + @ApiImplicitParam(name = "des", value = "RESOURCE_DESC", dataType ="String"), + @ApiImplicitParam(name = "content", value = "CONTENT",required = true, dataType ="String") + }) + @PostMapping(value = "/online-create") + public Result onlineCreateResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "type") 
ResourceType type, + @RequestParam(value ="fileName")String fileName, + @RequestParam(value ="suffix")String fileSuffix, + @RequestParam(value = "desc", required = false) String desc, + @RequestParam(value = "content") String content + ) { + try{ + logger.info("login user {}, online create resource! fileName : {}, type : {}, suffix : {},desc : {},content : {}", + loginUser.getUserName(),type,fileName,fileSuffix,desc,content); + if(StringUtils.isEmpty(content)){ + logger.error("resource file contents are not allowed to be empty"); + return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); + } + return resourceService.onlineCreateResource(loginUser,type,fileName,fileSuffix,desc,content); + }catch (Exception e){ + logger.error(CREATE_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); + return error(Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); + } + } + + /** + * edit resource file online + * + * @param loginUser + * @param resourceId + */ + @ApiOperation(value = "updateResourceContent", notes= "UPDATE_RESOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "content", value = "CONTENT",required = true, dataType ="String") + }) + @PostMapping(value = "/update-content") + public Result updateResourceContent(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int resourceId, + @RequestParam(value = "content") String content + ) { + try{ + logger.info("login user {}, updateProcessInstance resource : {}", + loginUser.getUserName(),resourceId); + if(StringUtils.isEmpty(content)){ + logger.error("The resource file contents are not allowed to be empty"); + return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); + } + return resourceService.updateResourceContent(resourceId,content); + }catch 
(Exception e){ + logger.error(EDIT_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); + return error(Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); + } + } + + /** + * download resource file + * + * @param loginUser + * @param resourceId + */ + @ApiOperation(value = "downloadResource", notes= "DOWNLOAD_RESOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/download") + @ResponseBody + public ResponseEntity downloadResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int resourceId) { + try{ + logger.info("login user {}, download resource : {}", + loginUser.getUserName(), resourceId); + Resource file = resourceService.downloadResource(resourceId); + if (file == null) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.RESOURCE_NOT_EXIST.getMsg()); + } + return ResponseEntity + .ok() + .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + file.getFilename() + "\"") + .body(file); + }catch (Exception e){ + logger.error(DOWNLOAD_RESOURCE_FILE_ERROR.getMsg(),e); + return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.DOWNLOAD_RESOURCE_FILE_ERROR.getMsg()); + } + } + + + /** + * create udf function + * @param loginUser + * @param type + * @param funcName + * @param argTypes + * @param database + * @param desc + * @param resourceId + * @return + */ + @ApiOperation(value = "createUdfFunc", notes= "CREATE_UDF_FUNCTION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType ="UdfType"), + @ApiImplicitParam(name = "funcName", value = "FUNC_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "suffix", value = "CLASS_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType 
="String"), + @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType ="String"), + @ApiImplicitParam(name = "desc", value = "UDF_DESC", dataType ="String"), + @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + + }) + @PostMapping(value = "/udf-func/create") + @ResponseStatus(HttpStatus.CREATED) + public Result createUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "type") UdfType type, + @RequestParam(value ="funcName")String funcName, + @RequestParam(value ="className")String className, + @RequestParam(value ="argTypes", required = false)String argTypes, + @RequestParam(value ="database", required = false)String database, + @RequestParam(value = "desc", required = false) String desc, + @RequestParam(value = "resourceId") int resourceId) { + logger.info("login user {}, create udf function, type: {}, funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}", + loginUser.getUserName(),type, funcName, argTypes,database,desc, resourceId); + Result result = new Result(); + + try { + return udfFuncService.createUdfFunction(loginUser,funcName,className,argTypes,database,desc,type,resourceId); + } catch (Exception e) { + logger.error(CREATE_UDF_FUNCTION_ERROR.getMsg(),e); + return error(Status.CREATE_UDF_FUNCTION_ERROR.getCode(), Status.CREATE_UDF_FUNCTION_ERROR.getMsg()); + } + } + + /** + * view udf function + * + * @param loginUser + * @param id + * @return + */ + @ApiOperation(value = "viewUIUdfFunction", notes= "VIEW_UDF_FUNCTION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + + }) + @GetMapping(value = "/udf-func/update-ui") + @ResponseStatus(HttpStatus.OK) + public Result viewUIUdfFunction(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("id") int id) + { + Result result = new 
Result(); + logger.info("login user {}, query udf{}", + loginUser.getUserName(), id); + try { + Map map = udfFuncService.queryUdfFuncDetail(id); + return returnDataList(map); + } catch (Exception e) { + logger.error(VIEW_UDF_FUNCTION_ERROR.getMsg(),e); + return error(Status.VIEW_UDF_FUNCTION_ERROR.getCode(), Status.VIEW_UDF_FUNCTION_ERROR.getMsg()); + } + } + + /** + * updateProcessInstance udf function + * + * @param loginUser + * @param type + * @param funcName + * @param argTypes + * @param database + * @param desc + * @param resourceId + * @return + */ + @ApiOperation(value = "updateUdfFunc", notes= "UPDATE_UDF_FUNCTION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType ="UdfType"), + @ApiImplicitParam(name = "funcName", value = "FUNC_NAME",required = true, dataType ="String"), + @ApiImplicitParam(name = "suffix", value = "CLASS_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType ="String"), + @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType ="String"), + @ApiImplicitParam(name = "desc", value = "UDF_DESC", dataType ="String"), + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + + }) + @PostMapping(value = "/udf-func/update") + public Result updateUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int udfFuncId, + @RequestParam(value = "type") UdfType type, + @RequestParam(value ="funcName")String funcName, + @RequestParam(value ="className")String className, + @RequestParam(value ="argTypes", required = false)String argTypes, + @RequestParam(value ="database", required = false)String database, + @RequestParam(value = "desc", required = false) String desc, + @RequestParam(value = "resourceId") int resourceId) { + try { + logger.info("login user {}, updateProcessInstance udf function id: {},type: {}, 
funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}", + loginUser.getUserName(),udfFuncId,type, funcName, argTypes,database,desc, resourceId); + Map result = udfFuncService.updateUdfFunc(udfFuncId,funcName,className,argTypes,database,desc,type,resourceId); + return returnDataList(result); + } catch (Exception e) { + logger.error(UPDATE_UDF_FUNCTION_ERROR.getMsg(),e); + return error(Status.UPDATE_UDF_FUNCTION_ERROR.getCode(), Status.UPDATE_UDF_FUNCTION_ERROR.getMsg()); + } + } + + /** + * query udf function list paging + * + * @param loginUser + * @param pageNo + * @param pageSize + * @return + */ + @ApiOperation(value = "queryUdfFuncListPaging", notes= "QUERY_UDF_FUNCTION_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + }) + @GetMapping(value="/udf-func/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize + ){ + try{ + logger.info("query udf functions list, login user:{},search value:{}", + loginUser.getUserName(), searchVal); + Map result = checkPageParams(pageNo, pageSize); + if(result.get(Constants.STATUS) != Status.SUCCESS){ + return returnDataListPaging(result); + } + + result = udfFuncService.queryUdfFuncListPaging(loginUser,searchVal,pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getMsg(),e); + return error(Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getCode(), Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getMsg()); + } + } + + /** + * query 
resource list by type + * + * @param loginUser + * @return + */ + @ApiOperation(value = "queryResourceList", notes= "QUERY_RESOURCE_LIST_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType ="UdfType") + }) + @GetMapping(value="/udf-func/list") + @ResponseStatus(HttpStatus.OK) + public Result queryResourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("type") UdfType type){ + try{ + logger.info("query datasource list, user:{}, type:{}", loginUser.getUserName(), type.toString()); + Map result = udfFuncService.queryResourceList(loginUser,type.ordinal()); + return returnDataList(result); + }catch (Exception e){ + logger.error(QUERY_DATASOURCE_BY_TYPE_ERROR.getMsg(),e); + return error(Status.QUERY_DATASOURCE_BY_TYPE_ERROR.getCode(),QUERY_DATASOURCE_BY_TYPE_ERROR.getMsg()); + } + } + + /** + * verify udf function name can use or not + * + * @param loginUser + * @param name + * @return + */ + @ApiOperation(value = "verifyUdfFuncName", notes= "VERIFY_UDF_FUNCTION_NAME_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "name", value = "FUNC_NAME",required = true, dataType ="String") + + }) + @GetMapping(value = "/udf-func/verify-name") + @ResponseStatus(HttpStatus.OK) + public Result verifyUdfFuncName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value ="name") String name + ) { + logger.info("login user {}, verfiy udf function name: {}", + loginUser.getUserName(),name); + + try{ + + return udfFuncService.verifyUdfFuncByName(name); + }catch (Exception e){ + logger.error(VERIFY_UDF_FUNCTION_NAME_ERROR.getMsg(),e); + return error(Status.VERIFY_UDF_FUNCTION_NAME_ERROR.getCode(), Status.VERIFY_UDF_FUNCTION_NAME_ERROR.getMsg()); + } + } + + /** + * delete udf function + * + * @param loginUser + * @param udfFuncId + */ + @ApiOperation(value = "deleteUdfFunc", notes= "DELETE_UDF_FUNCTION_NOTES") + @ApiImplicitParams({ 
+ @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/udf-func/delete") + @ResponseStatus(HttpStatus.OK) + public Result deleteUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value ="id") int udfFuncId + ) { + try{ + + logger.info("login user {}, delete udf function id: {}", loginUser.getUserName(),udfFuncId); + return udfFuncService.delete(udfFuncId); + }catch (Exception e){ + logger.error(DELETE_UDF_FUNCTION_ERROR.getMsg(),e); + return error(Status.DELETE_UDF_FUNCTION_ERROR.getCode(), Status.DELETE_UDF_FUNCTION_ERROR.getMsg()); + } + } + + /** + * authorized file resource list + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "authorizedFile", notes= "AUTHORIZED_FILE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/authed-file") + @ResponseStatus(HttpStatus.CREATED) + public Result authorizedFile(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("userId") Integer userId) { + try{ + logger.info("authorized file resource, user: {}, user id:{}", loginUser.getUserName(), userId); + Map result = resourceService.authorizedFile(loginUser, userId); + return returnDataList(result); + }catch (Exception e){ + logger.error(AUTHORIZED_FILE_RESOURCE_ERROR.getMsg(),e); + return error(Status.AUTHORIZED_FILE_RESOURCE_ERROR.getCode(), Status.AUTHORIZED_FILE_RESOURCE_ERROR.getMsg()); + } + } + + + /** + * unauthorized file resource list + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "unauthorizedFile", notes= "UNAUTHORIZED_FILE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/unauth-file") + 
@ResponseStatus(HttpStatus.CREATED) + public Result unauthorizedFile(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("userId") Integer userId) { + try{ + logger.info("resource unauthorized file, user:{}, unauthorized user id:{}", loginUser.getUserName(), userId); + Map result = resourceService.unauthorizedFile(loginUser, userId); + return returnDataList(result); + }catch (Exception e){ + logger.error(UNAUTHORIZED_FILE_RESOURCE_ERROR.getMsg(),e); + return error(Status.UNAUTHORIZED_FILE_RESOURCE_ERROR.getCode(), Status.UNAUTHORIZED_FILE_RESOURCE_ERROR.getMsg()); + } + } + + + /** + * unauthorized udf function + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "unauthUDFFunc", notes= "UNAUTHORIZED_UDF_FUNC_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/unauth-udf-func") + @ResponseStatus(HttpStatus.CREATED) + public Result unauthUDFFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("userId") Integer userId) { + try{ + logger.info("unauthorized udf function, login user:{}, unauthorized user id:{}", loginUser.getUserName(), userId); + + Map result = resourceService.unauthorizedUDFFunction(loginUser, userId); + return returnDataList(result); + }catch (Exception e){ + logger.error(UNAUTHORIZED_UDF_FUNCTION_ERROR.getMsg(),e); + return error(Status.UNAUTHORIZED_UDF_FUNCTION_ERROR.getCode(), Status.UNAUTHORIZED_UDF_FUNCTION_ERROR.getMsg()); + } + } + + + /** + * authorized udf function + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "authUDFFunc", notes= "AUTHORIZED_UDF_FUNC_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + }) + @GetMapping(value = "/authed-udf-func") + @ResponseStatus(HttpStatus.CREATED) + 
public Result authorizedUDFFunction(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("userId") Integer userId) { + try{ + logger.info("auth udf function, login user:{}, auth user id:{}", loginUser.getUserName(), userId); + Map result = resourceService.authorizedUDFFunction(loginUser, userId); + return returnDataList(result); + }catch (Exception e){ + logger.error(AUTHORIZED_UDF_FUNCTION_ERROR.getMsg(),e); + return error(Status.AUTHORIZED_UDF_FUNCTION_ERROR.getCode(), Status.AUTHORIZED_UDF_FUNCTION_ERROR.getMsg()); + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java new file mode 100644 index 0000000000..1d436d7d3d --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java @@ -0,0 +1,336 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.SchedulerService; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.WarningType; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + +import static org.apache.dolphinscheduler.api.utils.Constants.SESSION_USER; +import static org.apache.dolphinscheduler.api.enums.Status.*; + +/** + * schedule controller + */ +@Api(tags = "SCHEDULER_TAG", position = 13) +@RestController +@RequestMapping("/projects/{projectName}/schedule") +public class SchedulerController extends BaseController { + + private static final Logger logger = LoggerFactory.getLogger(SchedulerController.class); + public static final String DEFAULT_WARNING_TYPE = "NONE"; + public static final String DEFAULT_NOTIFY_GROUP_ID = "1"; + public static final String DEFAULT_FAILURE_POLICY = "CONTINUE"; + + + @Autowired + private SchedulerService schedulerService; + + + /** + * create schedule + * + * @param loginUser + * @param projectName + * @param processDefinitionId + * @param schedule + * @param warningType + * @param warningGroupId + * @param failureStrategy + * @return + */ + @ApiOperation(value = "createSchedule", notes= "CREATE_SCHEDULE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", 
value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type ="WarningType"), + @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type ="FailureStrategy"), + @ApiImplicitParam(name = "receivers", value = "RECEIVERS", type ="String"), + @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type ="String"), + @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type ="Priority"), + }) + @PostMapping("/create") + @ResponseStatus(HttpStatus.CREATED) + public Result createSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "processDefinitionId") Integer processDefinitionId, + @RequestParam(value = "schedule") String schedule, + @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, + @RequestParam(value = "warningGroupId", required = false, defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId, + @RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy, + @RequestParam(value = "receivers", required = false) String receivers, + @RequestParam(value = "receiversCc", required = false) String receiversCc, + @RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId, + @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { + 
logger.info("login user {}, project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," + + "failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {}, workGroupId:{}", + loginUser.getUserName(), projectName, processDefinitionId, schedule, warningType, warningGroupId, + failureStrategy, receivers, receiversCc, processInstancePriority, workerGroupId); + try { + Map result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule, + warningType, warningGroupId, failureStrategy, receivers, receiversCc, processInstancePriority, workerGroupId); + + return returnDataList(result); + } catch (Exception e) { + logger.error(CREATE_SCHEDULE_ERROR.getMsg(), e); + return error(CREATE_SCHEDULE_ERROR.getCode(), CREATE_SCHEDULE_ERROR.getMsg()); + } + } + + /** + * updateProcessInstance schedule + * + * @param loginUser + * @param projectName + * @param id + * @param schedule + * @param warningType + * @param warningGroupId + * @param failureStrategy + * @return + */ + @ApiOperation(value = "updateSchedule", notes= "UPDATE_SCHEDULE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type ="WarningType"), + @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type ="FailureStrategy"), + @ApiImplicitParam(name = "receivers", value = "RECEIVERS", type ="String"), + @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type ="String"), + @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "processInstancePriority", value = 
"PROCESS_INSTANCE_PRIORITY", type ="Priority"), + }) + @PostMapping("/update") + public Result updateSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "id") Integer id, + @RequestParam(value = "schedule") String schedule, + @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, + @RequestParam(value = "warningGroupId", required = false) int warningGroupId, + @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, + @RequestParam(value = "receivers", required = false) String receivers, + @RequestParam(value = "receiversCc", required = false) String receiversCc, + @RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId, + @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { + logger.info("login user {}, project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " + + "failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {},workerGroupId:{}", + loginUser.getUserName(), projectName, id, schedule, warningType, warningGroupId, failureStrategy, + receivers, receiversCc, processInstancePriority, workerGroupId); + + try { + Map result = schedulerService.updateSchedule(loginUser, projectName, id, schedule, + warningType, warningGroupId, failureStrategy, receivers, receiversCc, null, processInstancePriority, workerGroupId); + return returnDataList(result); + + } catch (Exception e) { + logger.error(UPDATE_SCHEDULE_ERROR.getMsg(), e); + return error(Status.UPDATE_SCHEDULE_ERROR.getCode(), Status.UPDATE_SCHEDULE_ERROR.getMsg()); + } + } + + /** + * publish schedule setScheduleState + * + * @param loginUser + * @param projectName + * @param id + * @return + 
* @throws Exception + */ + @ApiOperation(value = "online", notes= "ONLINE_SCHEDULE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") + }) + @PostMapping("/online") + public Result online(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName, + @RequestParam("id") Integer id) { + logger.info("login user {}, schedule setScheduleState, project name: {}, id: {}", + loginUser.getUserName(), projectName, id); + try { + Map result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.ONLINE); + return returnDataList(result); + + } catch (Exception e) { + logger.error(PUBLISH_SCHEDULE_ONLINE_ERROR.getMsg(), e); + return error(Status.PUBLISH_SCHEDULE_ONLINE_ERROR.getCode(), Status.PUBLISH_SCHEDULE_ONLINE_ERROR.getMsg()); + } + } + + /** + * offline schedule + * + * @param loginUser + * @param projectName + * @param id + * @return + */ + @ApiOperation(value = "offline", notes= "OFFLINE_SCHEDULE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") + }) + @PostMapping("/offline") + public Result offline(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName, + @RequestParam("id") Integer id) { + logger.info("login user {}, schedule offline, project name: {}, process definition id: {}", + loginUser.getUserName(), projectName, id); + + try { + Map result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.OFFLINE); + return returnDataList(result); + + } catch (Exception e) { + logger.error(OFFLINE_SCHEDULE_ERROR.getMsg(), e); + return error(Status.OFFLINE_SCHEDULE_ERROR.getCode(), 
Status.OFFLINE_SCHEDULE_ERROR.getMsg()); + } + } + + /** + * query schedule list paging + * + * @param loginUser + * @param projectName + * @param processDefinitionId + * @return + */ + @ApiOperation(value = "queryScheduleListPaging", notes= "QUERY_SCHEDULE_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true,dataType = "Int", example = "100"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100") + + }) + @GetMapping("/list-paging") + public Result queryScheduleListPaging(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam Integer processDefinitionId, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + logger.info("login user {}, query schedule, project name: {}, process definition id: {}", + loginUser.getUserName(), projectName, processDefinitionId); + try { + searchVal = ParameterUtils.handleEscapes(searchVal); + Map result = schedulerService.querySchedule(loginUser, projectName, processDefinitionId, searchVal, pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(QUERY_SCHEDULE_LIST_PAGING_ERROR.getMsg(),e); + return error(Status.QUERY_SCHEDULE_LIST_PAGING_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_PAGING_ERROR.getMsg()); + } + + } + + /** + * delete schedule by id + * + * @param loginUser + * @param projectName + * @param scheduleId + * @return + */ + @ApiOperation(value = "deleteScheduleById", notes= "OFFLINE_SCHEDULE_NOTES") + @ApiImplicitParams({ + 
@ApiImplicitParam(name = "scheduleId", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") + }) + @GetMapping(value="/delete") + @ResponseStatus(HttpStatus.OK) + public Result deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser, + @PathVariable String projectName, + @RequestParam("scheduleId") Integer scheduleId + ){ + try{ + logger.info("delete schedule by id, login user:{}, project name:{}, schedule id:{}", + loginUser.getUserName(), projectName, scheduleId); + Map result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId); + return returnDataList(result); + }catch (Exception e){ + logger.error(DELETE_SCHEDULE_CRON_BY_ID_ERROR.getMsg(),e); + return error(Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR.getCode(), Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR.getMsg()); + } + } + /** + * query schedule list + * + * @param loginUser + * @param projectName + * @return + */ + @ApiOperation(value = "queryScheduleList", notes= "QUERY_SCHEDULE_LIST_NOTES") + @PostMapping("/list") + public Result queryScheduleList(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName) { + try { + logger.info("login user {}, query schedule list, project name: {}", + loginUser.getUserName(), projectName); + Map result = schedulerService.queryScheduleList(loginUser, projectName); + return returnDataList(result); + } catch (Exception e) { + logger.error(QUERY_SCHEDULE_LIST_ERROR.getMsg(), e); + return error(Status.QUERY_SCHEDULE_LIST_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_ERROR.getMsg()); + } + } + + /** + * preview schedule + * + * @param loginUser + * @param projectName + * @param schedule + * @return + */ + @ApiOperation(value = "previewSchedule", notes= "PREVIEW_SCHEDULE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = 
"{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"), + }) + @PostMapping("/preview") + @ResponseStatus(HttpStatus.CREATED) + public Result previewSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "schedule") String schedule + ){ + logger.info("login user {}, project name: {}, preview schedule: {}", + loginUser.getUserName(), projectName, schedule); + try { + Map result = schedulerService.previewSchedule(loginUser, projectName, schedule); + return returnDataList(result); + } catch (Exception e) { + logger.error(PREVIEW_SCHEDULE_ERROR.getMsg(), e); + return error(PREVIEW_SCHEDULE_ERROR.getCode(), PREVIEW_SCHEDULE_ERROR.getMsg()); + } + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java new file mode 100644 index 0000000000..99213358cd --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.service.TaskInstanceService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.*; +import org.apache.dolphinscheduler.api.enums.Status; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + +/** + * task instance controller + */ +@Api(tags = "TASK_INSTANCE_TAG", position = 11) +@RestController +@RequestMapping("/projects/{projectName}/task-instance") +public class TaskInstanceController extends BaseController{ + + private static final Logger logger = LoggerFactory.getLogger(TaskInstanceController.class); + + @Autowired + TaskInstanceService taskInstanceService; + + + /** + * query task list paging + * + * @param loginUser + * @return + */ + @ApiOperation(value = "queryTaskListPaging", notes= "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID",required = false, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"), + @ApiImplicitParam(name = "taskName", value = "TASK_NAME", type ="String"), + @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type ="ExecutionStatus"), + @ApiImplicitParam(name = "host", value = "HOST", type ="String"), + @ApiImplicitParam(name = "startDate", value = 
"START_DATE", type ="String"), + @ApiImplicitParam(name = "endDate", value = "END_DATE", type ="String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") + }) + @GetMapping("/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryTaskListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "processInstanceId", required = false, defaultValue = "0") Integer processInstanceId, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam(value = "taskName", required = false) String taskName, + @RequestParam(value = "stateType", required = false) ExecutionStatus stateType, + @RequestParam(value = "host", required = false) String host, + @RequestParam(value = "startDate", required = false) String startTime, + @RequestParam(value = "endDate", required = false) String endTime, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize){ + + try{ + logger.info("query task instance list, project name:{},process instance:{}, search value:{},task name:{}, state type:{}, host:{}, start:{}, end:{}", + projectName, processInstanceId, searchVal, taskName, stateType, host, startTime, endTime); + searchVal = ParameterUtils.handleEscapes(searchVal); + Map result = taskInstanceService.queryTaskListPaging( + loginUser, projectName, processInstanceId, taskName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(Status.QUERY_TASK_LIST_PAGING_ERROR.getMsg(),e); + return error(Status.QUERY_TASK_LIST_PAGING_ERROR.getCode(), Status.QUERY_TASK_LIST_PAGING_ERROR.getMsg()); + } + + } + +} diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java new file mode 100644 index 0000000000..dabcf62b80 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.service.TaskRecordService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.api.enums.Status; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + +/** + * data quality controller + */ +@ApiIgnore +@RestController +@RequestMapping("/projects/task-record") +public class TaskRecordController extends BaseController{ + + + private static final Logger logger = LoggerFactory.getLogger(TaskRecordController.class); + + + @Autowired + TaskRecordService taskRecordService; + + /** + * query task record list paging + * + * @param loginUser + * @return + */ + @GetMapping("/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryTaskRecordListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "taskName", required = false) String taskName, + @RequestParam(value = "state", required = false) String state, + @RequestParam(value = "sourceTable", required = false) String sourceTable, + @RequestParam(value = "destTable", required = false) String destTable, + @RequestParam(value = "taskDate", required = false) String taskDate, + @RequestParam(value = "startDate", required = false) String startTime, + @RequestParam(value = "endDate", required = false) String endTime, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize + ){ + + try{ + logger.info("query task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}", + taskName, state, taskDate, startTime, endTime); + Map result = 
taskRecordService.queryTaskRecordListPaging(false, taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e); + return error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getCode(), Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg()); + } + + } + + /** + * query history task record list paging + * + * @param loginUser + * @return + */ + @GetMapping("/history-list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryHistoryTaskRecordListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "taskName", required = false) String taskName, + @RequestParam(value = "state", required = false) String state, + @RequestParam(value = "sourceTable", required = false) String sourceTable, + @RequestParam(value = "destTable", required = false) String destTable, + @RequestParam(value = "taskDate", required = false) String taskDate, + @RequestParam(value = "startDate", required = false) String startTime, + @RequestParam(value = "endDate", required = false) String endTime, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize + ){ + + try{ + logger.info("query hisotry task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}", + taskName, state, taskDate, startTime, endTime); + Map result = taskRecordService.queryTaskRecordListPaging(true, taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e); + return error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getCode(), Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg()); + } + + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java new file mode 100644 index 0000000000..2ec74398c2 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java @@ -0,0 +1,245 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.TenantService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + + +/** + * tenant controller + */ +@Api(tags = "TENANT_TAG", position = 1) +@RestController +@RequestMapping("/tenant") +public class TenantController extends BaseController{ + + private static final Logger logger = LoggerFactory.getLogger(TenantController.class); + + + @Autowired + private TenantService tenantService; + + /** + * create tenant + * + * @param loginUser + * @param tenantCode + * @param tenantName + * @param queueId + * @param desc + * @return + */ + @ApiOperation(value = "createTenant", notes= "CREATE_TENANT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String"), + @ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType ="Int",example = "100"), + @ApiImplicitParam(name = "desc", value = "TENANT_DESC", dataType ="String") + + }) + @PostMapping(value = "/create") + @ResponseStatus(HttpStatus.CREATED) + public Result createTenant(@ApiIgnore @RequestAttribute(value = 
Constants.SESSION_USER) User loginUser, + @RequestParam(value = "tenantCode") String tenantCode, + @RequestParam(value = "tenantName") String tenantName, + @RequestParam(value = "queueId") int queueId, + @RequestParam(value = "desc",required = false) String desc) { + logger.info("login user {}, create tenant, tenantCode: {}, tenantName: {}, queueId: {}, desc: {}", + loginUser.getUserName(), tenantCode, tenantName, queueId,desc); + try { + Map result = tenantService.createTenant(loginUser,tenantCode,tenantName,queueId,desc); + return returnDataList(result); + + }catch (Exception e){ + logger.error(Status.CREATE_TENANT_ERROR.getMsg(),e); + return error(Status.CREATE_TENANT_ERROR.getCode(), Status.CREATE_TENANT_ERROR.getMsg()); + } + } + + + /** + * query tenant list paging + * + * @param loginUser + * @param pageNo + * @param searchVal + * @param pageSize + * @return + */ + @ApiOperation(value = "queryTenantlistPaging", notes= "QUERY_TENANT_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + }) + @GetMapping(value="/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryTenantlistPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize){ + logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}", + loginUser.getUserName(),pageNo,searchVal,pageSize); + try{ + Map result = checkPageParams(pageNo, pageSize); + if(result.get(Constants.STATUS) != Status.SUCCESS){ + return returnDataListPaging(result); + } + searchVal = ParameterUtils.handleEscapes(searchVal); + result = 
tenantService.queryTenantList(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(Status.QUERY_TENANT_LIST_PAGING_ERROR.getMsg(),e); + return error(Status.QUERY_TENANT_LIST_PAGING_ERROR.getCode(), Status.QUERY_TENANT_LIST_PAGING_ERROR.getMsg()); + } + } + + + /** + * tenant list + * + * @param loginUser + * @return + */ + @ApiOperation(value = "queryTenantlist", notes= "QUERY_TENANT_LIST_NOTES") + @GetMapping(value="/list") + @ResponseStatus(HttpStatus.OK) + public Result queryTenantlist(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + logger.info("login user {}, query tenant list"); + try{ + Map result = tenantService.queryTenantList(loginUser); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.QUERY_TENANT_LIST_ERROR.getMsg(),e); + return error(Status.QUERY_TENANT_LIST_ERROR.getCode(), Status.QUERY_TENANT_LIST_ERROR.getMsg()); + } + } + + + + /** + * udpate tenant + * + * @param loginUser + * @param tenantCode + * @param tenantName + * @param queueId + * @param desc + * @return + */ + @ApiOperation(value = "updateTenant", notes= "UPDATE_TENANT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String"), + @ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "desc", value = "TENANT_DESC", type ="String") + + }) + @PostMapping(value = "/update") + @ResponseStatus(HttpStatus.OK) + public Result updateTenant(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int id, + @RequestParam(value = "tenantCode") String tenantCode, + 
@RequestParam(value = "tenantName") String tenantName, + @RequestParam(value = "queueId") int queueId, + @RequestParam(value = "desc",required = false) String desc) { + logger.info("login user {}, updateProcessInstance tenant, tenantCode: {}, tenantName: {}, queueId: {}, desc: {}", + loginUser.getUserName(), tenantCode, tenantName, queueId,desc); + try { + Map result = tenantService.updateTenant(loginUser,id,tenantCode, tenantName, queueId, desc); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.UPDATE_TENANT_ERROR.getMsg(),e); + return error(Status.UPDATE_TENANT_ERROR.getCode(), Status.UPDATE_TENANT_ERROR.getMsg()); + } + } + + /** + * delete tenant by id + * + * @param loginUser + * @param id + * @return + */ + @ApiOperation(value = "deleteTenantById", notes= "DELETE_TENANT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType ="Int", example = "100") + + }) + @PostMapping(value = "/delete") + @ResponseStatus(HttpStatus.OK) + public Result deleteTenantById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int id) { + logger.info("login user {}, delete tenant, tenantCode: {},", loginUser.getUserName(), id); + try { + Map result = tenantService.deleteTenantById(loginUser,id); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.DELETE_TENANT_BY_ID_ERROR.getMsg(),e); + return error(Status.DELETE_TENANT_BY_ID_ERROR.getCode(), Status.DELETE_TENANT_BY_ID_ERROR.getMsg()); + } + } + + + /** + * verify tenant code + * + * @param loginUser + * @param tenantCode + * @return + */ + @ApiOperation(value = "verifyTenantCode", notes= "VERIFY_TENANT_CODE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String") + }) + @GetMapping(value = "/verify-tenant-code") + @ResponseStatus(HttpStatus.OK) + public Result verifyTenantCode(@ApiIgnore 
@RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value ="tenantCode") String tenantCode + ) { + + try{ + logger.info("login user {}, verfiy tenant code: {}", + loginUser.getUserName(),tenantCode); + return tenantService.verifyTenantCode(tenantCode); + }catch (Exception e){ + logger.error(Status.VERIFY_TENANT_CODE_ERROR.getMsg(),e); + return error(Status.VERIFY_TENANT_CODE_ERROR.getCode(), Status.VERIFY_TENANT_CODE_ERROR.getMsg()); + } + } + + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java new file mode 100644 index 0000000000..f8aab7c97c --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java @@ -0,0 +1,454 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.UsersService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import java.util.Map; + + +/** + * user controller + */ +@Api(tags = "USERS_TAG" , position = 14) +@RestController +@RequestMapping("/users") +public class UsersController extends BaseController{ + + private static final Logger logger = LoggerFactory.getLogger(UsersController.class); + + @Autowired + private UsersService usersService; + + /** + * create user + * + * @param loginUser + * @param userName + * @param userPassword + * @param email + * @param tenantId + * @param phone + * @return + */ + @ApiOperation(value = "createUser", notes= "CREATE_USER_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String"), + @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type ="String"), + @ApiImplicitParam(name = "tenantId", value = "TENANT_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "email", value = "EMAIL", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "phone", value = "PHONE", dataType = "Int", example = "100") + }) + 
@PostMapping(value = "/create") + @ResponseStatus(HttpStatus.CREATED) + public Result createUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "userName") String userName, + @RequestParam(value = "userPassword") String userPassword, + @RequestParam(value = "tenantId") int tenantId, + @RequestParam(value = "queue",required = false,defaultValue = "") String queue, + @RequestParam(value = "email") String email, + @RequestParam(value = "phone", required = false) String phone) { + logger.info("login user {}, create user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}", + loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone,queue); + + try { + Map result = usersService.createUser(loginUser, userName, userPassword,email,tenantId, phone,queue); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.CREATE_USER_ERROR.getMsg(),e); + return error(Status.CREATE_USER_ERROR.getCode(), Status.CREATE_USER_ERROR.getMsg()); + } + } + + /** + * query user list paging + * + * @param loginUser + * @param pageNo + * @param searchVal + * @param pageSize + * @return + */ + @ApiOperation(value = "queryUserList", notes= "QUERY_USER_LIST_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO",dataType = "Int", example = "100"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", type ="String"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String") + }) + @GetMapping(value="/list-paging") + @ResponseStatus(HttpStatus.OK) + public Result queryUserList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize){ + logger.info("login user {}, list user paging, pageNo: {}, searchVal: {}, pageSize: {}", + 
loginUser.getUserName(),pageNo,searchVal,pageSize); + try{ + Map result = checkPageParams(pageNo, pageSize); + if(result.get(Constants.STATUS) != Status.SUCCESS){ + return returnDataListPaging(result); + } + searchVal = ParameterUtils.handleEscapes(searchVal); + result = usersService.queryUserList(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); + }catch (Exception e){ + logger.error(Status.QUERY_USER_LIST_PAGING_ERROR.getMsg(),e); + return error(Status.QUERY_USER_LIST_PAGING_ERROR.getCode(), Status.QUERY_USER_LIST_PAGING_ERROR.getMsg()); + } + } + + + /** + * update user + * + * @param loginUser + * @param id + * @param userName + * @param userPassword + * @param email + * @param tenantId + * @param phone + * @return + */ + @ApiOperation(value = "updateUser", notes= "UPDATE_USER_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "USER_ID",dataType = "Int", example = "100"), + @ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String"), + @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type ="String"), + @ApiImplicitParam(name = "tenantId", value = "TENANT_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "email", value = "EMAIL", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "phone", value = "PHONE", dataType = "Int", example = "100") + }) + @PostMapping(value = "/update") + @ResponseStatus(HttpStatus.OK) + public Result updateUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int id, + @RequestParam(value = "userName") String userName, + @RequestParam(value = "userPassword") String userPassword, + @RequestParam(value = "queue",required = false,defaultValue = "") String queue, + @RequestParam(value = "email") String email, + @RequestParam(value = "tenantId") int tenantId, + @RequestParam(value = 
"phone", required = false) String phone) { + logger.info("login user {}, updateProcessInstance user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}", + loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone,queue); + try { + Map result = usersService.updateUser(id, userName, userPassword, email, tenantId, phone, queue); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.UPDATE_USER_ERROR.getMsg(),e); + return error(Status.UPDATE_USER_ERROR.getCode(), Status.UPDATE_USER_ERROR.getMsg()); + } + } + + /** + * delete user by id + * @param loginUser + * @param id + * @return + */ + @ApiOperation(value = "delUserById", notes= "DELETE_USER_BY_ID_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "USER_ID",dataType = "Int", example = "100") + }) + @PostMapping(value = "/delete") + @ResponseStatus(HttpStatus.OK) + public Result delUserById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "id") int id) { + logger.info("login user {}, delete user, userId: {},", loginUser.getUserName(), id); + try { + Map result = usersService.deleteUserById(loginUser, id); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.DELETE_USER_BY_ID_ERROR.getMsg(),e); + return error(Status.DELETE_USER_BY_ID_ERROR.getCode(), Status.DELETE_USER_BY_ID_ERROR.getMsg()); + } + } + + /** + * grant project + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "grantProject", notes= "GRANT_PROJECT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), + @ApiImplicitParam(name = "projectIds", value = "PROJECT_IDS",type = "String") + }) + @PostMapping(value = "/grant-project") + @ResponseStatus(HttpStatus.OK) + public Result grantProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + 
@RequestParam(value = "userId") int userId, + @RequestParam(value = "projectIds") String projectIds) { + logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(), userId,projectIds); + try { + Map result = usersService.grantProject(loginUser, userId, projectIds); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.GRANT_PROJECT_ERROR.getMsg(),e); + return error(Status.GRANT_PROJECT_ERROR.getCode(), Status.GRANT_PROJECT_ERROR.getMsg()); + } + } + + /** + * grant resource + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "grantResource", notes= "GRANT_RESOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), + @ApiImplicitParam(name = "resourceIds", value = "RESOURCE_IDS",type = "String") + }) + @PostMapping(value = "/grant-file") + @ResponseStatus(HttpStatus.OK) + public Result grantResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "userId") int userId, + @RequestParam(value = "resourceIds") String resourceIds) { + logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId,resourceIds); + try { + Map result = usersService.grantResources(loginUser, userId, resourceIds); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.GRANT_RESOURCE_ERROR.getMsg(),e); + return error(Status.GRANT_RESOURCE_ERROR.getCode(), Status.GRANT_RESOURCE_ERROR.getMsg()); + } + } + + + /** + * grant udf function + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "grantUDFFunc", notes= "GRANT_UDF_FUNC_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), + @ApiImplicitParam(name = "udfIds", value = "UDF_IDS",type = "String") + }) + @PostMapping(value = "/grant-udf-func") + 
@ResponseStatus(HttpStatus.OK) + public Result grantUDFFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "userId") int userId, + @RequestParam(value = "udfIds") String udfIds) { + logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId,udfIds); + try { + Map result = usersService.grantUDFFunction(loginUser, userId, udfIds); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.GRANT_UDF_FUNCTION_ERROR.getMsg(),e); + return error(Status.GRANT_UDF_FUNCTION_ERROR.getCode(), Status.GRANT_UDF_FUNCTION_ERROR.getMsg()); + } + } + + + + /** + * grant datasource + * + * @param loginUser + * @param userId + * @return + */ + @ApiOperation(value = "grantDataSource", notes= "GRANT_DATASOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), + @ApiImplicitParam(name = "datasourceIds", value = "DATASOURCE_IDS",type = "String") + }) + @PostMapping(value = "/grant-datasource") + @ResponseStatus(HttpStatus.OK) + public Result grantDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "userId") int userId, + @RequestParam(value = "datasourceIds") String datasourceIds) { + logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(),userId,datasourceIds); + try { + Map result = usersService.grantDataSource(loginUser, userId, datasourceIds); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.GRANT_DATASOURCE_ERROR.getMsg(),e); + return error(Status.GRANT_DATASOURCE_ERROR.getCode(), Status.GRANT_DATASOURCE_ERROR.getMsg()); + } + } + + + /** + * get user info + * + * @param loginUser + * @return + */ + @ApiOperation(value = "getUserInfo", notes= "GET_USER_INFO_NOTES") + @GetMapping(value="/get-user-info") + @ResponseStatus(HttpStatus.OK) + public Result 
getUserInfo(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + logger.info("login user {},get user info : {}", loginUser.getUserName()); + try{ + Map result = usersService.getUserInfo(loginUser); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.GET_USER_INFO_ERROR.getMsg(),e); + return error(Status.GET_USER_INFO_ERROR.getCode(), Status.GET_USER_INFO_ERROR.getMsg()); + } + } + + /** + * user list no paging + * + * @param loginUser + * @return + */ + @ApiOperation(value = "listUser", notes= "LIST_USER_NOTES") + @GetMapping(value="/list") + @ResponseStatus(HttpStatus.OK) + public Result listUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + logger.info("login user {}, user list"); + try{ + Map result = usersService.queryAllGeneralUsers(loginUser); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.USER_LIST_ERROR.getMsg(),e); + return error(Status.USER_LIST_ERROR.getCode(), Status.USER_LIST_ERROR.getMsg()); + } + } + + + /** + * user list no paging + * + * @param loginUser + * @return + */ + @GetMapping(value="/list-all") + @ResponseStatus(HttpStatus.OK) + public Result listAll(@RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + logger.info("login user {}, user list"); + try{ + Map result = usersService.queryUserList(loginUser); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.USER_LIST_ERROR.getMsg(),e); + return error(Status.USER_LIST_ERROR.getCode(), Status.USER_LIST_ERROR.getMsg()); + } + } + + + /** + * verify username + * + * @param loginUser + * @param userName + * @return + */ + @ApiOperation(value = "verifyUserName", notes= "VERIFY_USER_NAME_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String") + }) + @GetMapping(value = "/verify-user-name") + @ResponseStatus(HttpStatus.OK) + public Result verifyUserName(@ApiIgnore @RequestAttribute(value = 
Constants.SESSION_USER) User loginUser, + @RequestParam(value ="userName") String userName + ) { + try{ + + logger.info("login user {}, verfiy user name: {}", + loginUser.getUserName(),userName); + return usersService.verifyUserName(userName); + }catch (Exception e){ + logger.error(Status.VERIFY_USERNAME_ERROR.getMsg(),e); + return error(Status.VERIFY_USERNAME_ERROR.getCode(), Status.VERIFY_USERNAME_ERROR.getMsg()); + } + } + + + /** + * unauthorized user + * + * @param loginUser + * @param alertgroupId + * @return + */ + @ApiOperation(value = "unauthorizedUser", notes= "UNAUTHORIZED_USER_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID",type = "String") + }) + @GetMapping(value = "/unauth-user") + @ResponseStatus(HttpStatus.OK) + public Result unauthorizedUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("alertgroupId") Integer alertgroupId) { + try{ + logger.info("unauthorized user, login user:{}, alert group id:{}", + loginUser.getUserName(), alertgroupId); + Map result = usersService.unauthorizedUser(loginUser, alertgroupId); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.UNAUTHORIZED_USER_ERROR.getMsg(),e); + return error(Status.UNAUTHORIZED_USER_ERROR.getCode(), Status.UNAUTHORIZED_USER_ERROR.getMsg()); + } + } + + + /** + * authorized user + * + * @param loginUser + * @param alertgroupId + * @return + */ + @ApiOperation(value = "authorizedUser", notes= "AUTHORIZED_USER_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID",type = "String") + }) + @GetMapping(value = "/authed-user") + @ResponseStatus(HttpStatus.OK) + public Result authorizedUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("alertgroupId") Integer alertgroupId) { + try{ + logger.info("authorized user , login user:{}, alert group id:{}", + loginUser.getUserName(), 
alertgroupId); + Map result = usersService.authorizedUser(loginUser, alertgroupId); + return returnDataList(result); + }catch (Exception e){ + logger.error(Status.AUTHORIZED_USER_ERROR.getMsg(),e); + return error(Status.AUTHORIZED_USER_ERROR.getCode(), Status.AUTHORIZED_USER_ERROR.getMsg()); + } + } + + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java new file mode 100644 index 0000000000..d0c934ea6d --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java @@ -0,0 +1,170 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.dolphinscheduler.api.controller;


import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.WorkerGroupService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.apache.dolphinscheduler.api.utils.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;

import java.util.Map;

/**
 * worker group controller
 */
@Api(tags = "WORKER_GROUP_TAG", position = 1)
@RestController
@RequestMapping("/worker-group")
public class WorkerGroupController extends BaseController {

    private static final Logger logger = LoggerFactory.getLogger(WorkerGroupController.class);

    @Autowired
    WorkerGroupService workerGroupService;

    /**
     * Create or update a worker group. id == 0 means create, otherwise update.
     *
     * @param loginUser session user
     * @param id        worker group id (0 for create)
     * @param name      worker group name
     * @param ipList    worker ip list
     * @return save result wrapped in Result
     */
    @ApiOperation(value = "saveWorkerGroup", notes = "CREATE_WORKER_GROUP_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", dataType = "Int", example = "10", defaultValue = "0"),
            @ApiImplicitParam(name = "name", value = "WORKER_GROUP_NAME", required = true, dataType = "String"),
            @ApiImplicitParam(name = "ipList", value = "WORKER_IP_LIST", required = true, dataType = "String")
    })
    @PostMapping(value = "/save")
    @ResponseStatus(HttpStatus.OK)
    public Result saveWorkerGroup(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                  @RequestParam(value = "id", required = false, defaultValue = "0") int id,
                                  @RequestParam(value = "name") String name,
                                  @RequestParam(value = "ipList") String ipList
    ) {
        logger.info("save worker group: login user {}, id:{}, name: {}, ipList: {} ",
                loginUser.getUserName(), id, name, ipList);

        try {
            Map<String, Object> result = workerGroupService.saveWorkerGroup(id, name, ipList);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(Status.SAVE_ERROR.getMsg(), e);
            return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg());
        }
    }

    /**
     * Query worker groups with paging.
     *
     * @param loginUser session user
     * @param pageNo    page number
     * @param searchVal search keyword, optional (escaped before use)
     * @param pageSize  page size
     * @return paged worker group list wrapped in Result
     */
    @ApiOperation(value = "queryAllWorkerGroupsPaging", notes = "QUERY_WORKER_GROUP_PAGING_NOTES")
    @ApiImplicitParams({
            // FIX: swagger params were copy-pasted from /save (id/name/ipList); they now match
            // the actual request parameters of this endpoint
            @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "1"),
            @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", required = false, dataType = "String"),
            @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "20")
    })
    @GetMapping(value = "/list-paging")
    @ResponseStatus(HttpStatus.OK)
    public Result queryAllWorkerGroupsPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                             @RequestParam("pageNo") Integer pageNo,
                                             @RequestParam(value = "searchVal", required = false) String searchVal,
                                             @RequestParam("pageSize") Integer pageSize
    ) {
        logger.info("query all worker group paging: login user {}, pageNo:{}, pageSize:{}, searchVal:{}",
                loginUser.getUserName(), pageNo, pageSize, searchVal);

        try {
            searchVal = ParameterUtils.handleEscapes(searchVal);
            Map<String, Object> result = workerGroupService.queryAllGroupPaging(pageNo, pageSize, searchVal);
            return returnDataListPaging(result);
        } catch (Exception e) {
            // NOTE(review): Status.SAVE_ERROR is reused here for a query failure; a dedicated
            // QUERY status would be clearer — TODO confirm a suitable enum constant exists
            logger.error(Status.SAVE_ERROR.getMsg(), e);
            return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg());
        }
    }

    /**
     * Query all worker groups without paging.
     *
     * @param loginUser session user
     * @return worker group list wrapped in Result
     */
    @ApiOperation(value = "queryAllWorkerGroups", notes = "QUERY_WORKER_GROUP_LIST_NOTES")
    @GetMapping(value = "/all-groups")
    @ResponseStatus(HttpStatus.OK)
    public Result queryAllWorkerGroups(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser
    ) {
        logger.info("query all worker group: login user {}",
                loginUser.getUserName());

        try {
            Map<String, Object> result = workerGroupService.queryAllGroup();
            return returnDataList(result);
        } catch (Exception e) {
            // NOTE(review): Status.SAVE_ERROR reused for a query failure — see list-paging note
            logger.error(Status.SAVE_ERROR.getMsg(), e);
            return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg());
        }
    }

    /**
     * Delete a worker group by id.
     *
     * @param loginUser session user
     * @param id        worker group id
     * @return delete result wrapped in Result
     */
    @ApiOperation(value = "deleteById", notes = "DELETE_WORKER_GROUP_BY_ID_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", required = true, dataType = "Int", example = "10"),
    })
    // NOTE(review): a destructive operation exposed via GET; POST/DELETE would be safer,
    // but changing the verb would break existing clients
    @GetMapping(value = "/delete-by-id")
    @ResponseStatus(HttpStatus.OK)
    public Result deleteById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                             @RequestParam("id") Integer id
    ) {
        logger.info("delete worker group: login user {}, id:{} ",
                loginUser.getUserName(), id);

        try {
            Map<String, Object> result = workerGroupService.deleteWorkerGroupById(id);
            return returnDataList(result);
        } catch (Exception e) {
            // NOTE(review): Status.SAVE_ERROR reused for a delete failure — see list-paging note
            logger.error(Status.SAVE_ERROR.getMsg(), e);
            return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg());
        }
    }
}
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.dto; + +import org.apache.dolphinscheduler.common.enums.CommandType; + +/** + * command state count + */ +public class CommandStateCount { + + private int errorCount; + private int normalCount; + private CommandType commandState; + + public CommandStateCount(){} + public CommandStateCount(int errorCount, int normalCount, CommandType commandState) { + this.errorCount = errorCount; + this.normalCount = normalCount; + this.commandState = commandState; + } + + public int getErrorCount() { + return errorCount; + } + + public void setErrorCount(int errorCount) { + this.errorCount = errorCount; + } + + public int getNormalCount() { + return normalCount; + } + + public void setNormalCount(int normalCount) { + this.normalCount = normalCount; + } + + public CommandType getCommandState() { + return commandState; + } + + public void setCommandState(CommandType commandState) { + this.commandState = commandState; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DefineUserDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DefineUserDto.java new file mode 100644 index 0000000000..539dc71a5e --- /dev/null +++ 
package org.apache.dolphinscheduler.api.dto;

import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser;

import java.util.List;

/**
 * Aggregation of process-definition counts grouped by user: the per-user rows
 * plus the grand total across all users.
 */
public class DefineUserDto {

    /** total definition count summed over all users */
    private int count;

    // FIX: raw List restored to its element type
    /** per-user definition counts */
    private List<DefinitionGroupByUser> userList;

    /**
     * Build the DTO from per-user rows, accumulating the grand total.
     *
     * @param defineGroupByUsers per-user definition counts (kept by reference, not copied)
     */
    public DefineUserDto(List<DefinitionGroupByUser> defineGroupByUsers) {

        for (DefinitionGroupByUser define : defineGroupByUsers) {
            count += define.getCount();
        }
        this.userList = defineGroupByUsers;
    }

    public int getCount() {
        return count;
    }

    public void setCount(int count) {
        this.count = count;
    }

    public List<DefinitionGroupByUser> getUserList() {
        return userList;
    }

    public void setUserList(List<DefinitionGroupByUser> userList) {
        // NOTE(review): setter does not recompute count; callers must keep them in sync
        this.userList = userList;
    }
}
package org.apache.dolphinscheduler.api.dto;

import java.util.Date;

/**
 * Schedule parameters: the time window in which a schedule is active and the
 * crontab expression driving it.
 *
 * <p>NOTE: {@link Date} fields are stored and returned by reference (no
 * defensive copies), matching the existing callers' expectations.</p>
 */
public class ScheduleParam {

    /** window start */
    private Date startTime;
    /** window end */
    private Date endTime;
    /** crontab expression */
    private String crontab;

    /** No-arg constructor for deserialization. */
    public ScheduleParam() {
    }

    /**
     * @param startTime schedule window start
     * @param endTime   schedule window end
     * @param crontab   crontab expression
     */
    public ScheduleParam(Date startTime, Date endTime, String crontab) {
        this.crontab = crontab;
        this.startTime = startTime;
        this.endTime = endTime;
    }

    public Date getStartTime() {
        return startTime;
    }

    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    public Date getEndTime() {
        return endTime;
    }

    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    public String getCrontab() {
        return crontab;
    }

    public void setCrontab(String crontab) {
        this.crontab = crontab;
    }

    @Override
    public String toString() {
        // format kept byte-identical to the original implementation
        return "ScheduleParam{" +
                "startTime=" + startTime +
                ", endTime=" + endTime +
                ", crontab='" + crontab + '\'' +
                '}';
    }
}
package org.apache.dolphinscheduler.api.dto;

import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount;

import java.util.ArrayList;
import java.util.List;

/**
 * Task count DTO: buckets raw per-status counts into one
 * {@link TaskStateCount} per {@link ExecutionStatus}, plus a grand total.
 */
public class TaskCountDto {

    /**
     * total count across all statuses
     */
    private int totalCount;

    /**
     * one entry per execution status, in a fixed display order
     */
    private List<TaskStateCount> taskCountDtos;


    public TaskCountDto(List<ExecuteStatusCount> taskInstanceStateCounts) {
        countTaskDtos(taskInstanceStateCounts);
    }

    /**
     * Accumulate the raw rows into per-status buckets.
     * FIX: NEED_FAULT_TOLERANCE previously added to the FAILURE bucket, and
     * WAITTING_THREAD / WAITTING_DEPEND both added to the KILL bucket, so their
     * own buckets were always reported as 0.
     */
    private void countTaskDtos(List<ExecuteStatusCount> taskInstanceStateCounts) {
        int submittedSuccess = 0;
        int runningExeution = 0;
        int readyPause = 0;
        int pause = 0;
        int readyStop = 0;
        int stop = 0;
        int failure = 0;
        int success = 0;
        int needFaultTolerance = 0;
        int kill = 0;
        int waittingThread = 0;
        int waittingDepend = 0;

        for (ExecuteStatusCount taskInstanceStateCount : taskInstanceStateCounts) {
            ExecutionStatus status = taskInstanceStateCount.getExecutionStatus();
            totalCount += taskInstanceStateCount.getCount();
            switch (status) {
                case SUBMITTED_SUCCESS:
                    submittedSuccess += taskInstanceStateCount.getCount();
                    break;
                case RUNNING_EXEUTION:
                    runningExeution += taskInstanceStateCount.getCount();
                    break;
                case READY_PAUSE:
                    readyPause += taskInstanceStateCount.getCount();
                    break;
                case PAUSE:
                    pause += taskInstanceStateCount.getCount();
                    break;
                case READY_STOP:
                    readyStop += taskInstanceStateCount.getCount();
                    break;
                case STOP:
                    stop += taskInstanceStateCount.getCount();
                    break;
                case FAILURE:
                    failure += taskInstanceStateCount.getCount();
                    break;
                case SUCCESS:
                    success += taskInstanceStateCount.getCount();
                    break;
                case NEED_FAULT_TOLERANCE:
                    // FIX: was "failure += ..."
                    needFaultTolerance += taskInstanceStateCount.getCount();
                    break;
                case KILL:
                    kill += taskInstanceStateCount.getCount();
                    break;
                case WAITTING_THREAD:
                    // FIX: was "kill += ..."
                    waittingThread += taskInstanceStateCount.getCount();
                    break;
                case WAITTING_DEPEND:
                    // FIX: was "kill += ..."
                    waittingDepend += taskInstanceStateCount.getCount();
                    break;

                default:
                    break;
            }
        }
        this.taskCountDtos = new ArrayList<>();
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUBMITTED_SUCCESS, submittedSuccess));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.RUNNING_EXEUTION, runningExeution));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_PAUSE, readyPause));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.PAUSE, pause));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_STOP, readyStop));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.STOP, stop));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.FAILURE, failure));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUCCESS, success));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.NEED_FAULT_TOLERANCE, needFaultTolerance));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.KILL, kill));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_THREAD, waittingThread));
        this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_DEPEND, waittingDepend));
    }


    public List<TaskStateCount> getTaskCountDtos() {
        return taskCountDtos;
    }

    public void setTaskCountDtos(List<TaskStateCount> taskCountDtos) {
        this.taskCountDtos = taskCountDtos;
    }

    public int getTotalCount() {
        return totalCount;
    }

    public void setTotalCount(int totalCount) {
        this.totalCount = totalCount;
    }
}
+ */ +package org.apache.dolphinscheduler.api.dto; + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; + +/** + * task state count + */ +public class TaskStateCount { + + private int count; + private ExecutionStatus taskStateType; + + public TaskStateCount(ExecutionStatus taskStateType, int count) { + this.taskStateType = taskStateType; + this.count = count; + } + + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + public ExecutionStatus getTaskStateType() { + return taskStateType; + } + + public void setTaskStateType(ExecutionStatus taskStateType) { + this.taskStateType = taskStateType; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/gantt/GanttDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/gantt/GanttDto.java new file mode 100644 index 0000000000..d6fa662bcd --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/gantt/GanttDto.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.dolphinscheduler.api.dto.gantt;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Gantt chart DTO: chart height, the task bars to draw, their names, and a
 * status → CSS-class lookup map.
 */
public class GanttDto {

    /**
     * chart height in pixels
     */
    private int height;

    // FIX: raw collections restored to their element types throughout this class
    /**
     * task bars to render
     */
    private List<Task> tasks = new ArrayList<>();

    /**
     * task names, one per row
     */
    private List<String> taskNames;

    /**
     * maps a status keyword to its render class; pre-seeded with "success"
     */
    private Map<String, String> taskStatus;


    public GanttDto() {
        this.taskStatus = new HashMap<>();
        taskStatus.put("success", "success");
    }

    public GanttDto(int height, List<Task> tasks, List<String> taskNames) {
        this();
        this.height = height;
        this.tasks = tasks;
        // FIX: stray double semicolon removed
        this.taskNames = taskNames;
    }

    public GanttDto(int height, List<Task> tasks, List<String> taskNames, Map<String, String> taskStatus) {
        this.height = height;
        this.tasks = tasks;
        this.taskNames = taskNames;
        this.taskStatus = taskStatus;
    }

    public int getHeight() {
        return height;
    }

    public void setHeight(int height) {
        this.height = height;
    }

    public List<Task> getTasks() {
        return tasks;
    }

    public void setTasks(List<Task> tasks) {
        this.tasks = tasks;
    }

    public List<String> getTaskNames() {
        return taskNames;
    }

    public void setTaskNames(List<String> taskNames) {
        this.taskNames = taskNames;
    }

    public Map<String, String> getTaskStatus() {
        return taskStatus;
    }

    public void setTaskStatus(Map<String, String> taskStatus) {
        this.taskStatus = taskStatus;
    }
}
package org.apache.dolphinscheduler.api.dto.gantt;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * One task bar on the Gantt chart: name, timing information, status and
 * human-readable duration.
 */
public class Task {

    /**
     * task name
     */
    private String taskName;

    // FIX: raw List restored to List<Date> for both date lists
    /**
     * task start dates (one per run)
     */
    private List<Date> startDate = new ArrayList<>();

    /**
     * task end dates (one per run)
     */
    private List<Date> endDate = new ArrayList<>();

    /**
     * task execution date
     */
    private Date executionDate;

    /**
     * ISO start time used by the chart renderer
     */
    private Date isoStart;

    /**
     * ISO end time used by the chart renderer
     */
    private Date isoEnd;

    /**
     * task status
     */
    private String status;

    /**
     * human-readable run duration
     */
    private String duration;

    public String getTaskName() {
        return taskName;
    }

    public void setTaskName(String taskName) {
        this.taskName = taskName;
    }

    public List<Date> getStartDate() {
        return startDate;
    }

    public void setStartDate(List<Date> startDate) {
        this.startDate = startDate;
    }

    public List<Date> getEndDate() {
        return endDate;
    }

    public void setEndDate(List<Date> endDate) {
        this.endDate = endDate;
    }

    public Date getExecutionDate() {
        return executionDate;
    }

    public void setExecutionDate(Date executionDate) {
        this.executionDate = executionDate;
    }

    public Date getIsoStart() {
        return isoStart;
    }

    public void setIsoStart(Date isoStart) {
        this.isoStart = isoStart;
    }

    public Date getIsoEnd() {
        return isoEnd;
    }

    public void setIsoEnd(Date isoEnd) {
        this.isoEnd = isoEnd;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getDuration() {
        return duration;
    }

    public void setDuration(String duration) {
        this.duration = duration;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.Date;

/**
 * One node instance shown in the process-definition tree view.
 *
 * <p>Plain data holder; unset fields keep their Java defaults
 * ({@code null} for objects, {@code 0} for {@link #subflowId}).
 */
public class Instance {

    /** instance id */
    private int id;

    /** node name */
    private String name;

    /** node type */
    private String type;

    /** node run state */
    private String state;

    /** node start time */
    private Date startTime;

    /** node end time */
    private Date endTime;

    /** host the node ran on */
    private String host;

    /** pre-formatted run duration */
    private String duration;

    /** id of the sub-workflow spawned by this node, 0 when none */
    private int subflowId;

    /** No-arg constructor: all fields stay at their defaults. */
    public Instance() {
    }

    /**
     * Identity-only constructor; delegates to the full constructor with
     * every remaining field left at its default value.
     */
    public Instance(int id, String name, String type) {
        this(id, name, type, null, null, null, null, null, 0);
    }

    /** Full constructor covering every field. */
    public Instance(int id, String name, String type, String state, Date startTime,
                    Date endTime, String host, String duration, int subflowId) {
        this.id = id;
        this.name = name;
        this.type = type;
        this.state = state;
        this.startTime = startTime;
        this.endTime = endTime;
        this.host = host;
        this.duration = duration;
        this.subflowId = subflowId;
    }

    /** Convenience constructor for nodes without a sub-workflow (subflowId = 0). */
    public Instance(int id, String name, String type, String state, Date startTime,
                    Date endTime, String host, String duration) {
        this(id, name, type, state, startTime, endTime, host, duration, 0);
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getState() {
        return state;
    }

    public void setState(String state) {
        this.state = state;
    }

    public Date getStartTime() {
        return startTime;
    }

    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    public Date getEndTime() {
        return endTime;
    }

    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    public String getHost() {
        return host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public String getDuration() {
        return duration;
    }

    public void setDuration(String duration) {
        this.duration = duration;
    }

    public int getSubflowId() {
        return subflowId;
    }

    public void setSubflowId(int subflowId) {
        this.subflowId = subflowId;
    }
}
+ */ +package org.apache.dolphinscheduler.api.dto.treeview; + +import java.util.ArrayList; +import java.util.List; + +/** + * TreeView + */ +public class TreeViewDto { + + /** + * name + */ + private String name; + + /** + * type + */ + private String type; + + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + /** + * instances list + * 实例列表 + */ + + private List instances = new ArrayList<>(); + + /** + * children + */ + private List children = new ArrayList<>(); + + + public List getInstances() { + return instances; + } + + public void setInstances(List instances) { + this.instances = instances; + } + + public List getChildren() { + return children; + } + + public void setChildren(List children) { + this.children = children; + } + + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/ExecuteType.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/ExecuteType.java new file mode 100644 index 0000000000..0af66ff5f0 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/ExecuteType.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.enums; + +/** + * execute type + */ +public enum ExecuteType { + + + /** + * 操作类型 + * 1.重跑 2.恢复暂停 3.恢复失败 4.停止 5.暂停 + */ + NONE,REPEAT_RUNNING, RECOVER_SUSPENDED_PROCESS, START_FAILURE_TASK_PROCESS, STOP, PAUSE; + + + public static ExecuteType getEnum(int value){ + for (ExecuteType e: ExecuteType.values()) { + if(e.ordinal() == value) { + return e; + } + } + return null;//For values out of enum scope + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java new file mode 100644 index 0000000000..2c23d2103b --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java @@ -0,0 +1,266 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.enums;

/**
 * API status/error codes returned to clients: a numeric code plus a
 * human-readable message (most messages use MessageFormat-style {0}
 * placeholders; a few use %s — see the NOTE(review) flags below).
 *
 * NOTE(review): several numeric codes are duplicated (flagged inline).
 * Duplicated codes are ambiguous for API consumers; fixing them changes
 * the wire protocol, so they are only flagged here, not changed.
 */
public enum Status {

    SUCCESS(0, "success"),

    // 100xx: generic / user / project / scheduling errors
    REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid"),
    TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid"),
    USER_NAME_EXIST(10003, "user name already exists"),
    USER_NAME_NULL(10004, "user name is null"),
//    DB_OPERATION_ERROR(10005, "database operation error"),
    HDFS_OPERATION_ERROR(10006, "hdfs operation error"),
    UPDATE_FAILED(10007, "updateProcessInstance failed"),
    TASK_INSTANCE_NOT_FOUND(10008, "task instance not found"),
    TENANT_NAME_EXIST(10009, "tenant code already exists"),
    USER_NOT_EXIST(10010, "user {0} not exists"),
    ALERT_GROUP_NOT_EXIST(10011, "alarm group not found"),
    ALERT_GROUP_EXIST(10012, "alarm group already exists"),
    USER_NAME_PASSWD_ERROR(10013, "user name or password error"),
    LOGIN_SESSION_FAILED(10014, "create session failed!"),
    DATASOURCE_EXIST(10015, "data source name already exists"),
    DATASOURCE_CONNECT_FAILED(10016, "data source connection failed"),
    TENANT_NOT_EXIST(10017, "tenant not exists"),
    PROJECT_NOT_FOUNT(10018, "project {0} not found "),
    PROJECT_ALREADY_EXISTS(10019, "project {0} already exists"),
    TASK_INSTANCE_NOT_EXISTS(10020, "task instance {0} does not exist"),
    TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE(10021, "task instance {0} is not sub process instance"),
    SCHEDULE_CRON_NOT_EXISTS(10022, "scheduler crontab {0} does not exist"),
    SCHEDULE_CRON_ONLINE_FORBID_UPDATE(10023, "online status does not allow updateProcessInstance operations"),
    SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}"),
    MASTER_NOT_EXISTS(10025, "master does not exist"),
    SCHEDULE_STATUS_UNKNOWN(10026, "unknown command: {0}"),
    CREATE_ALERT_GROUP_ERROR(10027, "create alert group error"),
    QUERY_ALL_ALERTGROUP_ERROR(10028, "query all alertgroup error"),
    LIST_PAGING_ALERT_GROUP_ERROR(10029, "list paging alert group error"),
    UPDATE_ALERT_GROUP_ERROR(10030, "updateProcessInstance alert group error"),
    DELETE_ALERT_GROUP_ERROR(10031, "delete alert group error"),
    ALERT_GROUP_GRANT_USER_ERROR(10032, "alert group grant user error"),
    CREATE_DATASOURCE_ERROR(10033, "create datasource error"),
    UPDATE_DATASOURCE_ERROR(10034, "updateProcessInstance datasource error"),
    QUERY_DATASOURCE_ERROR(10035, "query datasource error"),
    CONNECT_DATASOURCE_FAILURE(10036, "connect datasource failure"),
    CONNECTION_TEST_FAILURE(10037, "connection test failure"),
    DELETE_DATA_SOURCE_FAILURE(10038, "delete data source failure"),
    VERFIY_DATASOURCE_NAME_FAILURE(10039, "verfiy datasource name failure"),
    UNAUTHORIZED_DATASOURCE(10040, "unauthorized datasource"),
    AUTHORIZED_DATA_SOURCE(10041, "authorized data source"),
    LOGIN_SUCCESS(10042, "login success"),
    USER_LOGIN_FAILURE(10043, "user login failure"),
    LIST_WORKERS_ERROR(10044, "list workers error"),
    LIST_MASTERS_ERROR(10045, "list masters error"),
    UPDATE_PROJECT_ERROR(10046, "updateProcessInstance project error"),
    QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047, "query project details by id error"),
    CREATE_PROJECT_ERROR(10048, "create project error"),
    LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049, "login user query project list paging error"),
    DELETE_PROJECT_ERROR(10050, "delete project error"),
    QUERY_UNAUTHORIZED_PROJECT_ERROR(10051, "query unauthorized project error"),
    QUERY_AUTHORIZED_PROJECT(10052, "query authorized project"),
    QUERY_QUEUE_LIST_ERROR(10053, "query queue list error"),
    CREATE_RESOURCE_ERROR(10054, "create resource error"),
    UPDATE_RESOURCE_ERROR(10055, "updateProcessInstance resource error"),
    QUERY_RESOURCES_LIST_ERROR(10056, "query resources list error"),
    QUERY_RESOURCES_LIST_PAGING(10057, "query resources list paging"),
    DELETE_RESOURCE_ERROR(10058, "delete resource error"),
    VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059, "verify resource by name and type error"),
    VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060, "view resource file online error"),
    CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061, "create resource file online error"),
    RESOURCE_FILE_IS_EMPTY(10062, "resource file is empty"),
    EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063, "edit resource file online error"),
    DOWNLOAD_RESOURCE_FILE_ERROR(10064, "download resource file error"),
    CREATE_UDF_FUNCTION_ERROR(10065, "create udf function error"),
    VIEW_UDF_FUNCTION_ERROR(10066, "view udf function error"),
    UPDATE_UDF_FUNCTION_ERROR(10067, "updateProcessInstance udf function error"),
    QUERY_UDF_FUNCTION_LIST_PAGING_ERROR(10068, "query udf function list paging error"),
    QUERY_DATASOURCE_BY_TYPE_ERROR(10069, "query datasource by type error"),
    VERIFY_UDF_FUNCTION_NAME_ERROR(10070, "verify udf function name error"),
    DELETE_UDF_FUNCTION_ERROR(10071, "delete udf function error"),
    AUTHORIZED_FILE_RESOURCE_ERROR(10072, "authorized file resource error"),
    UNAUTHORIZED_FILE_RESOURCE_ERROR(10073, "unauthorized file resource error"),
    UNAUTHORIZED_UDF_FUNCTION_ERROR(10074, "unauthorized udf function error"),
    AUTHORIZED_UDF_FUNCTION_ERROR(10075, "authorized udf function error"),
    CREATE_SCHEDULE_ERROR(10076, "create schedule error"),
    UPDATE_SCHEDULE_ERROR(10077, "updateProcessInstance schedule error"),
    PUBLISH_SCHEDULE_ONLINE_ERROR(10078, "publish schedule online error"),
    OFFLINE_SCHEDULE_ERROR(10079, "offline schedule error"),
    QUERY_SCHEDULE_LIST_PAGING_ERROR(10080, "query schedule list paging error"),
    QUERY_SCHEDULE_LIST_ERROR(10081, "query schedule list error"),
    QUERY_TASK_LIST_PAGING_ERROR(10082, "query task list paging error"),
    QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083, "query task record list paging error"),
    CREATE_TENANT_ERROR(10084, "create tenant error"),
    QUERY_TENANT_LIST_PAGING_ERROR(10085, "query tenant list paging error"),
    QUERY_TENANT_LIST_ERROR(10086, "query tenant list error"),
    UPDATE_TENANT_ERROR(10087, "updateProcessInstance tenant error"),
    DELETE_TENANT_BY_ID_ERROR(10088, "delete tenant by id error"),
    VERIFY_TENANT_CODE_ERROR(10089, "verify tenant code error"),
    CREATE_USER_ERROR(10090, "create user error"),
    QUERY_USER_LIST_PAGING_ERROR(10091, "query user list paging error"),
    UPDATE_USER_ERROR(10092, "updateProcessInstance user error"),
    DELETE_USER_BY_ID_ERROR(10093, "delete user by id error"),
    GRANT_PROJECT_ERROR(10094, "grant project error"),
    GRANT_RESOURCE_ERROR(10095, "grant resource error"),
    GRANT_UDF_FUNCTION_ERROR(10096, "grant udf function error"),
    GRANT_DATASOURCE_ERROR(10097, "grant datasource error"),
    GET_USER_INFO_ERROR(10098, "get user info error"),
    USER_LIST_ERROR(10099, "user list error"),
    VERIFY_USERNAME_ERROR(10100, "verify username error"),
    UNAUTHORIZED_USER_ERROR(10101, "unauthorized user error"),
    AUTHORIZED_USER_ERROR(10102, "authorized user error"),
    QUERY_TASK_INSTANCE_LOG_ERROR(10103, "view task instance log error"),
    DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104, "download task instance log file error"),
    CREATE_PROCESS_DEFINITION(10105, "create process definition"),
    VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106, "verify process definition name unique error"),
    UPDATE_PROCESS_DEFINITION_ERROR(10107, "updateProcessInstance process definition error"),
    RELEASE_PROCESS_DEFINITION_ERROR(10108, "release process definition error"),
    QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109, "query datail of process definition error"),
    QUERY_PROCCESS_DEFINITION_LIST(10110, "query proccess definition list"),
    ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111, "encapsulation treeview structure error"),
    GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112, "get tasks list by process definition id error"),
    QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113, "query process instance list paging error"),
    QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114, "query task list by process instance id error"),
    UPDATE_PROCESS_INSTANCE_ERROR(10115, "updateProcessInstance process instance error"),
    QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116, "query process instance by id error"),
    DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117, "delete process instance by id error"),
    QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118, "query sub process instance detail info by task id error"),
    QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119, "query parent process instance detail info by sub process instance id error"),
    QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120, "query process instance all variables error"),
    ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121, "encapsulation process instance gantt structure error"),
    QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR(10122, "query proccess definition list paging error"),
    SIGN_OUT_ERROR(10123, "sign out error"),
    TENANT_CODE_HAS_ALREADY_EXISTS(10124, "tenant code has already exists"),
    IP_IS_EMPTY(10125, "ip is empty"),
    SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}"),
    CREATE_QUEUE_ERROR(10127, "create queue error"),
    QUEUE_NOT_EXIST(10128, "queue {0} not exists"),
    QUEUE_VALUE_EXIST(10129, "queue value {0} already exists"),
    QUEUE_NAME_EXIST(10130, "queue name {0} already exists"),
    UPDATE_QUEUE_ERROR(10131, "update queue error"),
    NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required"),
    VERIFY_QUEUE_ERROR(10133, "verify queue error"),
    NAME_NULL(10134, "name must be not null"),
    NAME_EXIST(10135, "name {0} already exists"),
    SAVE_ERROR(10136, "save error"),
    DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!"),
    // NOTE(review): code 10117 duplicates DELETE_PROCESS_INSTANCE_BY_ID_ERROR above;
    // the 10138 gap in the sequence suggests 10138 was intended.
    BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117, "batch delete process instance by ids {0} error"),
    PREVIEW_SCHEDULE_ERROR(10139, "preview schedule error"),
    PARSE_TO_CRON_EXPRESSION_ERROR(10140, "parse cron to cron expression error"),
    SCHEDULE_START_TIME_END_TIME_SAME(10141, "The start time must not be the same as the end"),

    // 200xx: UDF / resource errors
    UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found"),
    UDF_FUNCTION_EXISTS(20002, "UDF function already exists"),
//    RESOURCE_EMPTY(20003, "resource file is empty"),
    RESOURCE_NOT_EXIST(20004, "resource not exist"),
    RESOURCE_EXIST(20005, "resource already exists"),
    RESOURCE_SUFFIX_NOT_SUPPORT_VIEW(20006, "resource suffix do not support online viewing"),
    RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit"),
    RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified"),
    UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar"),
    // NOTE(review): code 20009 duplicates UDF_RESOURCE_SUFFIX_NOT_JAR above.
    HDFS_COPY_FAIL(20009, "hdfs copy {0} -> {1} fail"),
    RESOURCE_FILE_EXIST(20010, "resource file {0} already exists in hdfs,please delete it or change name!"),
    RESOURCE_FILE_NOT_EXIST(20011, "resource file {0} not exists in hdfs!"),

    // 300xx: permission errors
    USER_NO_OPERATION_PERM(30001, "user has no operation privilege"),
    USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} is not has project {1} permission"),

    // 500xx: process definition / instance errors
    PROCESS_INSTANCE_NOT_EXIST(50001, "process instance {0} does not exist"),
    PROCESS_INSTANCE_EXIST(50002, "process instance {0} already exists"),
    PROCESS_DEFINE_NOT_EXIST(50003, "process definition {0} does not exist"),
    PROCESS_DEFINE_NOT_RELEASE(50004, "process definition {0} not on line"),
    PROCESS_INSTANCE_ALREADY_CHANGED(50005, "the status of process instance {0} is already {1}"),
    PROCESS_INSTANCE_STATE_OPERATION_ERROR(50006, "the status of process instance {0} is {1},Cannot perform {2} operation"),
    SUB_PROCESS_INSTANCE_NOT_EXIST(50007, "the task belong to process instance does not exist"),
    PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit"),
    PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ..."),
    PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance"),
    TASK_INSTANCE_STATE_COUNT_ERROR(50011, "task instance state count error"),
    COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error"),
    COUNT_PROCESS_DEFINITION_USER_ERROR(50013, "count process definition user error"),
    START_PROCESS_INSTANCE_ERROR(50014, "start process instance error"),
    EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error"),
    CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error"),
    QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017, "query recipients and copyers by process definition error"),
    // NOTE(review): code 50017 duplicates the constant above; also uses
    // %s placeholders while most messages use MessageFormat-style {0}.
    DATA_IS_NOT_VALID(50017, "data %s not valid"),
    DATA_IS_NULL(50018, "data %s is null"),
    PROCESS_NODE_HAS_CYCLE(50019, "process node has cycle"),
    PROCESS_NODE_S_PARAMETER_INVALID(50020, "process node %s parameter invalid"),
    PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line"),
    DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022, "delete process definition by id error"),
    SCHEDULE_CRON_STATE_ONLINE(50023, "the status of schedule {0} is already on line"),
    DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024, "delete schedule by id error"),
    BATCH_DELETE_PROCESS_DEFINE_ERROR(50025, "batch delete process definition error"),
    BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026, "batch delete process definition by ids {0} error"),
    TENANT_NOT_SUITABLE(50027, "there is not any tenant suitable, please choose a tenant available."),
    EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028, "export process definition by id error"),
    IMPORT_PROCESS_DEFINE_ERROR(50029, "import process definition error"),

    // 600xx: hdfs errors
    HDFS_NOT_STARTUP(60001, "hdfs not startup"),
    HDFS_TERANT_RESOURCES_FILE_EXISTS(60002, "resource file exists,please delete resource first"),
    HDFS_TERANT_UDFS_FILE_EXISTS(60003, "udf file exists,please delete resource first"),

    /**
     * for monitor
     */
    QUERY_DATABASE_STATE_ERROR(70001, "query database state error"),
    QUERY_ZOOKEEPER_STATE_ERROR(70002, "query zookeeper state error"),

    // NOTE(review): 70001 and 70002 below duplicate the monitor codes above.
    CREATE_ACCESS_TOKEN_ERROR(70001, "create access token error"),
    GENERATE_TOKEN_ERROR(70002, "generate token error"),
    QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70003, "query access token list paging error"),

    // NOTE(review): message appears copy-pasted from TASK_INSTANCE_STATE_COUNT_ERROR.
    COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error"),

    QUEUE_COUNT_ERROR(90001, "queue count error"),

    KERBEROS_STARTUP_STATE(100001, "get kerberos startup state error"),
    ;

    // numeric code returned to the client
    private int code;
    // human-readable message (may contain placeholders)
    private String msg;

    private Status(int code, String msg) {
        this.code = code;
        this.msg = msg;
    }

    public int getCode() {
        return this.code;
    }

    public void setCode(int code) {
        this.code = code;
    }

    public String getMsg() {
        return this.msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }
}
+ */ +package org.apache.dolphinscheduler.api.interceptor; + +import com.alibaba.druid.support.http.WebStatFilter; + +/* this class annotation for druid stat monitor in development +@WebFilter(filterName="druidWebStatFilter",urlPatterns="/*", + initParams={ + @WebInitParam(name="exclusions",value="*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*") + }) */ +public class DruidStatFilter extends WebStatFilter { + + +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/DruidStatViewServlet.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/DruidStatViewServlet.java new file mode 100644 index 0000000000..9017622cd3 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/DruidStatViewServlet.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.interceptor; + +import com.alibaba.druid.support.http.StatViewServlet; + + +/* this class annotation for druid stat monitor in development +@WebServlet(urlPatterns = "/druid/*", + initParams={ +// @WebInitParam(name="allow",value="127.0.0.1"), +// @WebInitParam(name="deny",value="192.168.16.111"), + @WebInitParam(name="loginUsername",value="admin"), + @WebInitParam(name="loginPassword",value="escheduler123"), + @WebInitParam(name="resetEnable",value="true") + }) */ +public class DruidStatViewServlet extends StatViewServlet { + + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java new file mode 100644 index 0000000000..5b7b64a0dd --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.interceptor; + +import org.apache.dolphinscheduler.api.service.SessionService; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.dao.entity.Session; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.commons.httpclient.HttpStatus; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.servlet.HandlerInterceptor; +import org.springframework.web.servlet.ModelAndView; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +/** + * login interceptor, must login first + */ +public class LoginHandlerInterceptor implements HandlerInterceptor { + private static final Logger logger = LoggerFactory.getLogger(LoginHandlerInterceptor.class); + + @Autowired + private SessionService sessionService; + + @Autowired + private UserMapper userMapper; + + /** + * Intercept the execution of a handler. Called after HandlerMapping determined + * an appropriate handler object, but before HandlerAdapter invokes the handler. + *

DispatcherServlet processes a handler in an execution chain, consisting + * of any number of interceptors, with the handler itself at the end. + * With this method, each interceptor can decide to abort the execution chain, + * typically sending a HTTP error or writing a custom response. + *

Note: special considerations apply for asynchronous + * request processing. For more details see + * {@link org.springframework.web.servlet.AsyncHandlerInterceptor}. + * @param request current HTTP request + * @param response current HTTP response + * @param handler chosen handler to execute, for type and/or instance evaluation + * @return {@code true} if the execution chain should proceed with the + * next interceptor or the handler itself. Else, DispatcherServlet assumes + * that this interceptor has already dealt with the response itself. + * @throws Exception in case of errors + */ + @Override + public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) { + + // get token + String token = request.getHeader("token"); + User user = null; + if (StringUtils.isEmpty(token)){ + Session session = sessionService.getSession(request); + + if (session == null) { + response.setStatus(HttpStatus.SC_UNAUTHORIZED); + logger.info("session info is null "); + return false; + } + + //get user object from session + user = userMapper.selectById(session.getUserId()); + + // if user is null + if (user == null) { + response.setStatus(HttpStatus.SC_UNAUTHORIZED); + logger.info("user does not exist"); + return false; + } + }else { + user = userMapper.queryUserByToken(token); + if (user == null) { + response.setStatus(HttpStatus.SC_UNAUTHORIZED); + logger.info("user token has expired"); + return false; + } + } + request.setAttribute(Constants.SESSION_USER, user); + return true; + } + + @Override + public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception { + + } + + @Override + public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception { + + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/log/LogClient.java 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.log;

import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.StatusRuntimeException;
import org.apache.dolphinscheduler.rpc.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.TimeUnit;

/**
 * grpc client used by the API server to fetch task log content from a remote
 * log server. RPC failures are logged and surfaced to callers as {@code null}
 * return values.
 */
public class LogClient {

    private static final Logger logger = LoggerFactory.getLogger(LogClient.class);

    private final ManagedChannel channel;
    private final LogViewServiceGrpc.LogViewServiceBlockingStub blockingStub;

    /**
     * Construct a client connecting to the log server at {@code host:port}.
     *
     * @param host log server host
     * @param port log server port
     */
    public LogClient(String host, int port) {
        // Channels are secure by default (via SSL/TLS). TLS is disabled here
        // because the log server does not serve certificates.
        this(ManagedChannelBuilder.forAddress(host, port)
                .usePlaintext(true));
    }

    /**
     * Construct a client on an existing, pre-configured channel builder.
     *
     * @param channelBuilder channel builder pointing at the log server
     */
    LogClient(ManagedChannelBuilder<?> channelBuilder) {
        // allow arbitrarily large log payloads in a single response
        channelBuilder.maxInboundMessageSize(Integer.MAX_VALUE);
        channel = channelBuilder.build();
        blockingStub = LogViewServiceGrpc.newBlockingStub(channel);
    }

    /**
     * Shut down the underlying channel, waiting up to five seconds for
     * in-flight calls to complete.
     *
     * @throws InterruptedException if interrupted while waiting for termination
     */
    public void shutdown() throws InterruptedException {
        channel.shutdown().awaitTermination(5, TimeUnit.SECONDS);
    }

    /**
     * Roll-view a log file: fetch up to {@code limit} lines after skipping
     * {@code skipLineNum} lines.
     *
     * @param path        log file path on the log server
     * @param skipLineNum number of leading lines to skip
     * @param limit       maximum number of lines to return
     * @return the requested log fragment, or {@code null} when the RPC fails
     */
    public String rollViewLog(String path, int skipLineNum, int limit) {
        logger.info("roll view log : path {},skipLineNum {} ,limit {}", path, skipLineNum, limit);
        LogParameter pathParameter = LogParameter
                .newBuilder()
                .setPath(path)
                .setSkipLineNum(skipLineNum)
                .setLimit(limit)
                .build();
        try {
            RetStrInfo retStrInfo = blockingStub.rollViewLog(pathParameter);
            return retStrInfo.getMsg();
        } catch (StatusRuntimeException e) {
            logger.error("roll view log error", e);
            return null;
        }
    }

    /**
     * Fetch the entire content of a log file.
     *
     * @param path log file path on the log server
     * @return the log content, or {@code null} when the RPC fails
     */
    public String viewLog(String path) {
        logger.info("view log path {}", path);
        PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build();
        try {
            RetStrInfo retStrInfo = blockingStub.viewLog(pathParameter);
            return retStrInfo.getMsg();
        } catch (StatusRuntimeException e) {
            logger.error("view log error", e);
            return null;
        }
    }

    /**
     * Fetch the raw bytes of a log file (e.g. for download).
     *
     * @param path log file path on the log server
     * @return the log file bytes, or {@code null} when the RPC fails
     */
    public byte[] getLogBytes(String path) {
        logger.info("log path {}", path);
        PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build();
        try {
            RetByteInfo retByteInfo = blockingStub.getLogBytes(pathParameter);
            return retByteInfo.getData().toByteArray();
        } catch (StatusRuntimeException e) {
            logger.error("log size error", e);
            return null;
        }
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Constants;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.AccessToken;
import org.apache.dolphinscheduler.dao.entity.User;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.*;

/**
 * access token service: CRUD operations for API access tokens
 */
@Service
public class AccessTokenService extends BaseService {

    private static final Logger logger = LoggerFactory.getLogger(AccessTokenService.class);

    @Autowired
    private AccessTokenMapper accessTokenMapper;


    /**
     * Paging query of access tokens. Administrators see every token
     * (userId == 0 disables the owner filter in the mapper); ordinary users
     * only see their own.
     *
     * @param loginUser the user issuing the query
     * @param searchVal optional search keyword
     * @param pageNo    page number
     * @param pageSize  page size
     * @return result map with a {@link PageInfo} under DATA_LIST
     */
    public Map<String, Object> queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
        Map<String, Object> result = new HashMap<>(5);

        PageInfo<AccessToken> pageInfo = new PageInfo<>(pageNo, pageSize);
        Page<AccessToken> page = new Page<>(pageNo, pageSize);
        int userId = loginUser.getId();
        if (loginUser.getUserType() == UserType.ADMIN_USER) {
            // 0 means "no owner filter": admins see tokens of all users
            userId = 0;
        }
        IPage<AccessToken> accessTokenList = accessTokenMapper.selectAccessTokenPage(page, searchVal, userId);
        pageInfo.setTotalCount((int) accessTokenList.getTotal());
        pageInfo.setLists(accessTokenList.getRecords());
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * Create and persist an access token for a user.
     *
     * @param userId     owner of the token
     * @param expireTime expiration time, parsed by {@link DateUtils#stringToDate}
     * @param token      token string
     * @return result map with SUCCESS, or an error status when the insert fails
     */
    public Map<String, Object> createToken(int userId, String expireTime, String token) {
        Map<String, Object> result = new HashMap<>(5);

        AccessToken accessToken = new AccessToken();
        accessToken.setUserId(userId);
        accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
        accessToken.setToken(token);
        accessToken.setCreateTime(new Date());
        accessToken.setUpdateTime(new Date());

        // insert
        int insert = accessTokenMapper.insert(accessToken);

        if (insert > 0) {
            putMsg(result, Status.SUCCESS);
        } else {
            // NOTE(review): CREATE_ALERT_GROUP_ERROR looks copy-pasted from
            // AlertGroupService — confirm whether a dedicated access-token
            // error status exists before changing it.
            putMsg(result, Status.CREATE_ALERT_GROUP_ERROR);
        }

        return result;
    }

    /**
     * Generate a token string from the user id, expire time and current
     * timestamp (MD5 digest).
     *
     * @param userId     owner of the token
     * @param expireTime expiration time string, mixed into the digest
     * @return result map with the generated token under DATA_LIST
     */
    public Map<String, Object> generateToken(int userId, String expireTime) {
        Map<String, Object> result = new HashMap<>(5);
        String token = EncryptionUtils.getMd5(userId + expireTime + String.valueOf(System.currentTimeMillis()));
        result.put(Constants.DATA_LIST, token);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * Delete an access token by id. Admin-only operation.
     *
     * @param loginUser the user issuing the delete
     * @param id        access token id
     * @return result map with SUCCESS, or USER_NO_OPERATION_PERM for non-admins
     */
    public Map<String, Object> delAccessTokenById(User loginUser, int id) {
        Map<String, Object> result = new HashMap<>(5);
        // only admin can operate; report a permission error rather than the
        // misleading "user not exist" used previously
        if (!isAdmin(loginUser)) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        accessTokenMapper.deleteById(id);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * Update an existing token by id.
     *
     * @param id         token id to update
     * @param userId     new owner id
     * @param expireTime new expiration time
     * @param token      new token string
     * @return result map with SUCCESS
     */
    public Map<String, Object> updateToken(int id, int userId, String expireTime, String token) {
        Map<String, Object> result = new HashMap<>(5);
        AccessToken accessToken = new AccessToken();
        accessToken.setId(id);
        accessToken.setUserId(userId);
        accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
        accessToken.setToken(token);
        accessToken.setUpdateTime(new Date());

        accessTokenMapper.updateById(accessToken);

        putMsg(result, Status.SUCCESS);
        return result;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Constants;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.AlertType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.AlertGroup;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.UserAlertGroup;
import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;
import org.apache.dolphinscheduler.dao.mapper.UserAlertGroupMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * alert group service: CRUD for alert groups and user-group bindings.
 * Extends {@link BaseService} to reuse the shared admin-check and
 * result-message helpers instead of duplicating them.
 */
@Service
public class AlertGroupService extends BaseService {

    private static final Logger logger = LoggerFactory.getLogger(AlertGroupService.class);

    @Autowired
    private AlertGroupMapper alertGroupMapper;

    @Autowired
    private UserAlertGroupMapper userAlertGroupMapper;

    /**
     * Query all alert groups.
     *
     * @return result map with the full group list under DATA_LIST
     */
    public HashMap<String, Object> queryAlertgroup() {

        HashMap<String, Object> result = new HashMap<>(5);
        List<AlertGroup> alertGroups = alertGroupMapper.queryAllGroupList();
        result.put(Constants.DATA_LIST, alertGroups);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * Paging query of alert groups.
     *
     * @param loginUser the user issuing the query
     * @param searchVal optional search keyword
     * @param pageNo    page number
     * @param pageSize  page size
     * @return result map with a {@link PageInfo} under DATA_LIST
     */
    public Map<String, Object> listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {

        Map<String, Object> result = new HashMap<>(5);

        Page<AlertGroup> page = new Page<>(pageNo, pageSize);
        IPage<AlertGroup> alertGroupIPage = alertGroupMapper.queryAlertGroupPage(
                page, searchVal);
        PageInfo<AlertGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotalCount((int) alertGroupIPage.getTotal());
        pageInfo.setLists(alertGroupIPage.getRecords());
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * Create an alert group. Admin-only operation.
     *
     * @param loginUser the user issuing the create
     * @param groupName group name
     * @param groupType group type (e.g. email / wechat)
     * @param desc      description
     * @return result map with SUCCESS, or an error status on failure
     */
    public Map<String, Object> createAlertgroup(User loginUser, String groupName, AlertType groupType, String desc) {
        Map<String, Object> result = new HashMap<>(5);
        // only admin can operate
        if (checkAdmin(loginUser, result)) {
            return result;
        }

        AlertGroup alertGroup = new AlertGroup();
        Date now = new Date();

        alertGroup.setGroupName(groupName);
        alertGroup.setGroupType(groupType);
        alertGroup.setDescription(desc);
        alertGroup.setCreateTime(now);
        alertGroup.setUpdateTime(now);

        // insert
        int insert = alertGroupMapper.insert(alertGroup);

        if (insert > 0) {
            putMsg(result, Status.SUCCESS);
        } else {
            putMsg(result, Status.CREATE_ALERT_GROUP_ERROR);
        }
        return result;
    }

    /**
     * Check whether a user is an administrator.
     * Public override (widened from protected) kept for existing callers.
     *
     * @param user user to check
     * @return true when the user is an admin
     */
    @Override
    public boolean isAdmin(User user) {
        return user.getUserType() == UserType.ADMIN_USER;
    }

    /**
     * Update an alert group. Admin-only operation; only non-empty fields are
     * applied on top of the stored entity.
     *
     * @param loginUser the user issuing the update
     * @param id        alert group id
     * @param groupName new group name (ignored when empty)
     * @param groupType new group type (ignored when null)
     * @param desc      new description
     * @return result map with SUCCESS, or ALERT_GROUP_NOT_EXIST
     */
    public Map<String, Object> updateAlertgroup(User loginUser, int id, String groupName, AlertType groupType, String desc) {
        Map<String, Object> result = new HashMap<>(5);

        if (checkAdmin(loginUser, result)) {
            return result;
        }

        AlertGroup alertGroup = alertGroupMapper.selectById(id);

        if (alertGroup == null) {
            putMsg(result, Status.ALERT_GROUP_NOT_EXIST);
            return result;

        }

        Date now = new Date();

        if (StringUtils.isNotEmpty(groupName)) {
            alertGroup.setGroupName(groupName);
        }

        if (groupType != null) {
            alertGroup.setGroupType(groupType);
        }
        alertGroup.setDescription(desc);
        alertGroup.setUpdateTime(now);
        // update the stored entity
        alertGroupMapper.updateById(alertGroup);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * Delete an alert group by id. Admin-only operation.
     *
     * @param loginUser the user issuing the delete
     * @param id        alert group id
     * @return result map with SUCCESS, or USER_NO_OPERATION_PERM
     */
    public Map<String, Object> delAlertgroupById(User loginUser, int id) {
        Map<String, Object> result = new HashMap<>(5);
        result.put(Constants.STATUS, false);

        // only admin can operate
        if (checkAdmin(loginUser, result)) {
            return result;
        }

        alertGroupMapper.deleteById(id);
        putMsg(result, Status.SUCCESS);
        return result;
    }


    /**
     * Replace the users bound to an alert group. Admin-only operation: all
     * existing bindings are removed first, then the given comma-separated user
     * ids are inserted.
     *
     * @param loginUser    the user issuing the grant
     * @param alertgroupId alert group id
     * @param userIds      comma-separated user ids (empty clears all bindings)
     * @return result map with SUCCESS, or USER_NO_OPERATION_PERM
     */
    public Map<String, Object> grantUser(User loginUser, int alertgroupId, String userIds) {
        Map<String, Object> result = new HashMap<>(5);
        result.put(Constants.STATUS, false);

        // only admin can operate
        if (checkAdmin(loginUser, result)) {
            return result;
        }

        userAlertGroupMapper.deleteByAlertgroupId(alertgroupId);
        if (StringUtils.isEmpty(userIds)) {
            putMsg(result, Status.SUCCESS);
            return result;
        }

        String[] userIdsArr = userIds.split(",");

        // single timestamp for the whole batch (hoisted out of the loop)
        Date now = new Date();
        for (String userId : userIdsArr) {
            UserAlertGroup userAlertGroup = new UserAlertGroup();
            userAlertGroup.setAlertgroupId(alertgroupId);
            userAlertGroup.setUserId(Integer.parseInt(userId));
            userAlertGroup.setCreateTime(now);
            userAlertGroup.setUpdateTime(now);
            userAlertGroupMapper.insert(userAlertGroup);
        }

        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * Verify whether a group name is still free.
     *
     * @param loginUser the user issuing the check
     * @param groupName candidate group name
     * @return SUCCESS when free, ALERT_GROUP_EXIST when already taken
     */
    public Result verifyGroupName(User loginUser, String groupName) {
        Result result = new Result();
        List<AlertGroup> alertGroup = alertGroupMapper.queryByGroupName(groupName);
        if (alertGroup != null && alertGroup.size() > 0) {
            logger.error("group {} has exist, can't create again.", groupName);
            result.setCode(Status.ALERT_GROUP_EXIST.getCode());
            result.setMsg(Status.ALERT_GROUP_EXIST.getMsg());
        } else {
            result.setCode(Status.SUCCESS.getCode());
            result.setMsg(Status.SUCCESS.getMsg());
        }

        return result;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;

import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;

import java.util.ArrayList;
import java.util.List;

/**
 * base DAG service: shared helper for services that need a process instance's
 * task DAG
 */
public class BaseDAGService extends BaseService {


    /**
     * Builds the task DAG of a process instance from its stored definition
     * JSON. Nodes are keyed by task name; edges run from each task's
     * predecessors to the task itself.
     *
     * @param processInstance process instance whose JSON definition is parsed
     * @return DAG of the instance's tasks
     * @throws Exception when the instance JSON cannot be parsed
     */
    public static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) throws Exception {

        String processDefinitionJson = processInstance.getProcessInstanceJson();

        ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
        if (processData == null) {
            // fail fast with context instead of a bare NPE on getTasks() below
            throw new Exception("process instance json parsing failed, process data is null");
        }

        List<TaskNode> taskNodeList = processData.getTasks();

        List<TaskNodeRelation> taskNodeRelations = new ArrayList<>();

        // traverse node information and build predecessor -> node relations
        for (TaskNode taskNode : taskNodeList) {
            String preTasks = taskNode.getPreTasks();
            List<String> preTasksList = JSONUtils.toList(preTasks, String.class);

            // if previous tasks not empty
            if (preTasksList != null) {
                for (String depNode : preTasksList) {
                    taskNodeRelations.add(new TaskNodeRelation(depNode, taskNode.getName()));
                }
            }
        }

        ProcessDag processDag = new ProcessDag();
        processDag.setEdges(taskNodeRelations);
        processDag.setNodes(taskNodeList);

        // generate detail DAG, to be executed
        DAG<String, TaskNode, TaskNodeRelation> dag = new DAG<>();

        if (CollectionUtils.isNotEmpty(processDag.getNodes())) {
            for (TaskNode node : processDag.getNodes()) {
                dag.addNode(node.getName(), node);
            }
        }

        if (CollectionUtils.isNotEmpty(processDag.getEdges())) {
            for (TaskNodeRelation edge : processDag.getEdges()) {
                dag.addEdge(edge.getStartNode(), edge.getEndNode());
            }
        }

        return dag;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Constants;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.commons.lang3.StringUtils;

import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import java.text.MessageFormat;
import java.util.Map;

/**
 * base service: shared helpers for admin checks, result-map message handling,
 * cookie lookup and tenant directory creation
 */
public class BaseService {

    /**
     * Whether the given user is an administrator.
     *
     * @param user user to check
     * @return true when the user type is ADMIN_USER
     */
    protected boolean isAdmin(User user) {
        return user.getUserType() == UserType.ADMIN_USER;
    }

    /**
     * Guard for admin-only operations. When the user is not an admin, a
     * USER_NO_OPERATION_PERM message is written into {@code result}.
     *
     * @param loginUser user to check
     * @param result    result map that receives the error message
     * @return true when the operation must be rejected (non-admin caller)
     */
    protected boolean checkAdmin(User loginUser, Map<String, Object> result) {
        if (isAdmin(loginUser)) {
            return false;
        }
        // only admin can operate
        putMsg(result, Status.USER_NO_OPERATION_PERM);
        return true;
    }

    /**
     * Writes a status and its (optionally parameterized) message into a
     * result map.
     *
     * @param result       result map to fill
     * @param status       status to report
     * @param statusParams optional MessageFormat arguments for the message
     */
    protected void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
        result.put(Constants.STATUS, status);
        String msg = (statusParams == null || statusParams.length == 0)
                ? status.getMsg()
                : MessageFormat.format(status.getMsg(), statusParams);
        result.put(Constants.MSG, msg);
    }

    /**
     * Writes a status code and its (optionally parameterized) message into a
     * {@link Result} object.
     *
     * @param result       result object to fill
     * @param status       status to report
     * @param statusParams optional MessageFormat arguments for the message
     */
    protected void putMsg(Result result, Status status, Object... statusParams) {
        result.setCode(status.getCode());
        String msg = (statusParams == null || statusParams.length == 0)
                ? status.getMsg()
                : MessageFormat.format(status.getMsg(), statusParams);
        result.setMsg(msg);
    }

    /**
     * Finds a request cookie by name (case-insensitive).
     *
     * @param request current HTTP request
     * @param name    cookie name to look up
     * @return the matching cookie, or null when absent
     */
    public static Cookie getCookie(HttpServletRequest request, String name) {
        Cookie[] cookies = request.getCookies();
        if (cookies == null) {
            return null;
        }
        for (Cookie candidate : cookies) {
            if (StringUtils.equalsIgnoreCase(name, candidate.getName())) {
                return candidate;
            }
        }
        return null;
    }

    /**
     * Creates the tenant's resource and UDF directories on HDFS if they do
     * not exist yet.
     *
     * @param tenantCode tenant code whose directories are initialized
     * @throws Exception when directory creation fails
     */
    protected void createTenantDirIfNotExists(String tenantCode) throws Exception {
        // init resource path and udf path
        String resourcePath = HadoopUtils.getHdfsResDir(tenantCode);
        String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
        HadoopUtils.getInstance().mkdir(resourcePath);
        HadoopUtils.getInstance().mkdir(udfsPath);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;


import org.apache.dolphinscheduler.api.dto.CommandStateCount;
import org.apache.dolphinscheduler.api.dto.DefineUserDto;
import org.apache.dolphinscheduler.api.dto.TaskCountDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.queue.ITaskQueue;
import org.apache.dolphinscheduler.common.queue.TaskQueueFactory;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.text.MessageFormat;
import java.util.*;

/**
 * data analysis service: aggregated statistics for task instances, process
 * instances, process definitions, commands and the task queue
 */
@Service
public class DataAnalysisService {

    private static final Logger logger = LoggerFactory.getLogger(DataAnalysisService.class);

    @Autowired
    ProjectMapper projectMapper;

    @Autowired
    ProjectService projectService;

    @Autowired
    ProcessInstanceMapper processInstanceMapper;

    @Autowired
    ProcessDefinitionMapper processDefinitionMapper;

    @Autowired
    CommandMapper commandMapper;

    @Autowired
    ErrorCommandMapper errorCommandMapper;

    @Autowired
    TaskInstanceMapper taskInstanceMapper;

    @Autowired
    ProcessDao processDao;

    /**
     * Counts task instances per execution state for one project (or, with
     * projectId == 0, across all projects the user can see).
     *
     * @param loginUser the user issuing the query
     * @param projectId project id, 0 meaning "all projects"
     * @param startDate schedule-format start date
     * @param endDate   schedule-format end date
     * @return result map with a {@link TaskCountDto} under DATA_LIST
     */
    public Map<String, Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) {

        Map<String, Object> result = new HashMap<>(5);
        if (projectId != 0) {
            Project project = projectMapper.selectById(projectId);
            result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId));

            if (getResultStatus(result)) {
                return result;
            }
        }

        // parse the requested time window; reject the request on bad input
        Date start;
        Date end;
        try {
            start = DateUtils.getScheduleDate(startDate);
            end = DateUtils.getScheduleDate(endDate);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            putErrorRequestParamsMsg(result);
            return result;
        }

        List<ExecuteStatusCount> taskInstanceStateCounts =
                taskInstanceMapper.countTaskInstanceStateByUser(loginUser.getId(),
                        loginUser.getUserType(), start, end, String.valueOf(projectId));

        // check for null BEFORE building the dto, so a failed query cannot NPE
        if (taskInstanceStateCounts != null) {
            TaskCountDto taskCountResult = new TaskCountDto(taskInstanceStateCounts);
            result.put(Constants.DATA_LIST, taskCountResult);
            putMsg(result, Status.SUCCESS);
        } else {
            putMsg(result, Status.TASK_INSTANCE_STATE_COUNT_ERROR);
        }
        return result;
    }

    /**
     * Writes a REQUEST_PARAMS_NOT_VALID_ERROR for the date parameters into the
     * result map.
     *
     * @param result result map that receives the error message
     */
    private void putErrorRequestParamsMsg(Map<String, Object> result) {
        result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
        result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate"));
    }

    /**
     * Counts process instances per execution state for one project (or, with
     * projectId == 0, across all projects the user can see).
     *
     * @param loginUser the user issuing the query
     * @param projectId project id, 0 meaning "all projects the user may read"
     * @param startDate schedule-format start date
     * @param endDate   schedule-format end date
     * @return result map with a {@link TaskCountDto} under DATA_LIST
     */
    public Map<String, Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) {

        Map<String, Object> result = new HashMap<>(5);
        if (projectId != 0) {
            Project project = projectMapper.selectById(projectId);
            result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId));

            if (getResultStatus(result)) {
                return result;
            }
        }

        Date start;
        Date end;
        try {
            start = DateUtils.getScheduleDate(startDate);
            end = DateUtils.getScheduleDate(endDate);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            putErrorRequestParamsMsg(result);
            return result;
        }

        // projectId == 0 for a general user means "all projects with permission";
        // for an admin an empty array is passed through to the mapper
        List<Integer> projectIds = new ArrayList<>();
        if (projectId != 0) {
            projectIds.add(projectId);
        } else if (loginUser.getUserType() == UserType.GENERAL_USER) {
            projectIds = processDao.getProjectIdListHavePerm(loginUser.getId());
        }
        Integer[] projectIdArray = projectIds.toArray(new Integer[projectIds.size()]);

        List<ExecuteStatusCount> processInstanceStateCounts =
                processInstanceMapper.countInstanceStateByUser(start, end,
                        projectIdArray);

        // check for null BEFORE building the dto, so a failed query cannot NPE
        if (processInstanceStateCounts != null) {
            TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts);
            result.put(Constants.DATA_LIST, taskCountResult);
            putMsg(result, Status.SUCCESS);
        } else {
            putMsg(result, Status.COUNT_PROCESS_INSTANCE_STATE_ERROR);
        }
        return result;
    }


    /**
     * Counts process definitions grouped by owning user for one project.
     *
     * @param loginUser the user issuing the query
     * @param projectId project id
     * @return result map with a {@link DefineUserDto} under DATA_LIST
     */
    public Map<String, Object> countDefinitionByUser(User loginUser, int projectId) {
        Map<String, Object> result = new HashMap<>();

        Integer[] projectIdArray = new Integer[1];
        projectIdArray[0] = projectId;
        List<DefinitionGroupByUser> defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser(
                loginUser.getId(), projectIdArray);

        DefineUserDto dto = new DefineUserDto(defineGroupByUsers);
        result.put(Constants.DATA_LIST, dto);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * Writes a status and its message into a result map.
     *
     * @param result result map to fill
     * @param status status to report
     */
    private void putMsg(Map<String, Object> result, Status status) {
        result.put(Constants.STATUS, status);
        result.put(Constants.MSG, status.getMsg());
    }

    /**
     * Whether a previous check wrote a non-success status into the result map.
     *
     * @param result result map produced by an auth/project check
     * @return true when the stored status is not SUCCESS
     */
    private boolean getResultStatus(Map<String, Object> result) {
        Status resultEnum = (Status) result.get(Constants.STATUS);
        return resultEnum != Status.SUCCESS;
    }

    /**
     * Counts commands and error commands per command type.
     *
     * @param loginUser the user issuing the query
     * @param projectId project id, 0 meaning "all projects the user may read"
     * @param startDate schedule-format start date
     * @param endDate   schedule-format end date
     * @return result map with a list of {@link CommandStateCount} under DATA_LIST
     */
    public Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate) {

        Map<String, Object> result = new HashMap<>(5);
        if (projectId != 0) {
            Project project = projectMapper.selectById(projectId);
            result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId));

            if (getResultStatus(result)) {
                return result;
            }
        }

        Date start;
        Date end;
        try {
            start = DateUtils.getScheduleDate(startDate);
            end = DateUtils.getScheduleDate(endDate);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            putErrorRequestParamsMsg(result);
            return result;
        }

        List<Integer> projectIds = new ArrayList<>();
        if (projectId != 0) {
            projectIds.add(projectId);
        } else if (loginUser.getUserType() == UserType.GENERAL_USER) {
            projectIds = processDao.getProjectIdListHavePerm(loginUser.getId());
        }
        Integer[] projectIdArray = projectIds.toArray(new Integer[projectIds.size()]);

        // count command state
        List<CommandCount> commandStateCounts =
                commandMapper.countCommandState(
                        loginUser.getId(),
                        start,
                        end,
                        projectIdArray);

        // count error command state
        List<CommandCount> errorCommandStateCounts =
                errorCommandMapper.countCommandState(
                        start, end, projectIdArray);

        Map<CommandType, Map<String, Integer>> dataMap = new HashMap<>();

        // zeroed template used as the default per-type entry; always copied
        // before mutation so entries never share state
        Map<String, Integer> commonCommand = new HashMap<>();
        commonCommand.put("commandState", 0);
        commonCommand.put("errorCommandState", 0);

        // put command state.
        // BUGFIX: dataMap used to be read with get() while never being
        // pre-populated (the init block was commented out), which threw an NPE
        // for every returned command type; getOrDefault with the zeroed
        // template restores the intended behavior.
        for (CommandCount executeStatusCount : commandStateCounts) {
            Map<String, Integer> commandStateCountsMap =
                    new HashMap<>(dataMap.getOrDefault(executeStatusCount.getCommandType(), commonCommand));
            commandStateCountsMap.put("commandState", executeStatusCount.getCount());
            dataMap.put(executeStatusCount.getCommandType(), commandStateCountsMap);
        }

        // put error command state
        for (CommandCount errorExecutionStatus : errorCommandStateCounts) {
            Map<String, Integer> errorCommandStateCountsMap =
                    new HashMap<>(dataMap.getOrDefault(errorExecutionStatus.getCommandType(), commonCommand));
            errorCommandStateCountsMap.put("errorCommandState", errorExecutionStatus.getCount());
            dataMap.put(errorExecutionStatus.getCommandType(), errorCommandStateCountsMap);
        }

        List<CommandStateCount> list = new ArrayList<>();
        for (Map.Entry<CommandType, Map<String, Integer>> entry : dataMap.entrySet()) {
            CommandStateCount commandStateCount = new CommandStateCount(entry.getValue().get("errorCommandState"),
                    entry.getValue().get("commandState"), entry.getKey());
            list.add(commandStateCount);
        }

        result.put(Constants.DATA_LIST, list);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * Counts pending and to-kill tasks in the task queue. Admins see the raw
     * queue sizes; general users only see tasks belonging to their projects.
     *
     * @param loginUser the user issuing the query
     * @param projectId project id used to filter per-user counts
     * @return result map with {"taskQueue": n, "taskKill": m} under DATA_LIST
     */
    public Map<String, Object> countQueueState(User loginUser, int projectId) {
        Map<String, Object> result = new HashMap<>(5);
        if (projectId != 0) {
            Project project = projectMapper.selectById(projectId);
            result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId));

            if (getResultStatus(result)) {
                return result;
            }
        }

        ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance();
        List<String> tasksQueueList = tasksQueue.getAllTasks(org.apache.dolphinscheduler.common.Constants.SCHEDULER_TASKS_QUEUE);
        List<String> tasksKillList = tasksQueue.getAllTasks(org.apache.dolphinscheduler.common.Constants.SCHEDULER_TASKS_KILL);

        Map<String, Integer> dataMap = new HashMap<>();
        if (loginUser.getUserType() == UserType.ADMIN_USER) {
            // admins see the raw queue sizes without per-project filtering
            dataMap.put("taskQueue", tasksQueueList.size());
            dataMap.put("taskKill", tasksKillList.size());

            result.put(Constants.DATA_LIST, dataMap);
            putMsg(result, Status.SUCCESS);
            return result;
        }

        int[] tasksQueueIds = new int[tasksQueueList.size()];
        int[] tasksKillIds = new int[tasksKillList.size()];

        // queue entries are '_'-separated with the task id in the 4th field
        // (assumed from the splits.length == 4 check — confirm against producer)
        int i = 0;
        for (String taskQueueStr : tasksQueueList) {
            if (StringUtils.isNotEmpty(taskQueueStr)) {
                String[] splits = taskQueueStr.split("_");
                if (splits.length == 4) {
                    tasksQueueIds[i++] = Integer.parseInt(splits[3]);
                }
            }
        }

        // kill entries are '-'-separated with the task id in the 2nd field
        i = 0;
        for (String taskKillStr : tasksKillList) {
            if (StringUtils.isNotEmpty(taskKillStr)) {
                String[] splits = taskKillStr.split("-");
                if (splits.length == 2) {
                    tasksKillIds[i++] = Integer.parseInt(splits[1]);
                }
            }
        }
        Integer taskQueueCount = 0;
        Integer taskKillCount = 0;
        int[] projectIds = new int[1];
        projectIds[0] = projectId;

        if (tasksQueueIds.length != 0) {
            taskQueueCount = taskInstanceMapper.countTask(
                    loginUser.getId(), loginUser.getUserType(), projectIds,
                    tasksQueueIds);
        }

        if (tasksKillIds.length != 0) {
            taskKillCount = taskInstanceMapper.countTask(loginUser.getId(), loginUser.getUserType(), projectIds, tasksKillIds);
        }

        dataMap.put("taskQueue", taskQueueCount);
        dataMap.put("taskKill", taskKillCount);

        result.put(Constants.DATA_LIST, dataMap);
        putMsg(result, Status.SUCCESS);
        return result;
    }
}
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.job.db.*; +import org.apache.dolphinscheduler.common.utils.CommonUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; +import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; +import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.TypeReference; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.*; + +import static org.apache.dolphinscheduler.common.utils.PropertyUtils.getString; + +/** + * datasource service + */ +@Service +public class DataSourceService extends BaseService{ + + private static final Logger logger = LoggerFactory.getLogger(DataSourceService.class); + + public static final String NAME = "name"; + public static final String NOTE = "note"; + public static final String TYPE = "type"; + public static final String HOST = "host"; + public static 
final String PORT = "port"; + public static final String PRINCIPAL = "principal"; + public static final String DATABASE = "database"; + public static final String USER_NAME = "userName"; + public static final String PASSWORD = org.apache.dolphinscheduler.common.Constants.PASSWORD; + public static final String OTHER = "other"; + + + @Autowired + private DataSourceMapper dataSourceMapper; + + + @Autowired + private DataSourceUserMapper datasourceUserMapper; + + /** + * create data source + * + * @param loginUser + * @param name + * @param desc + * @param type + * @param parameter + * @return + */ + public Map createDataSource(User loginUser, String name, String desc, DbType type, String parameter) { + + Map result = new HashMap<>(5); + // check name can use or not + if (checkName(name, result)) { + return result; + } + Boolean isConnection = checkConnection(type, parameter); + if (!isConnection) { + logger.info("connect failed, type:{}, parameter:{}", type, parameter); + putMsg(result, Status.DATASOURCE_CONNECT_FAILED); + return result; + } + + BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter); + if (datasource == null) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, parameter); + return result; + } + + // build datasource + DataSource dataSource = new DataSource(); + Date now = new Date(); + + dataSource.setName(name.trim()); + dataSource.setNote(desc); + dataSource.setUserId(loginUser.getId()); + dataSource.setUserName(loginUser.getUserName()); + dataSource.setType(type); + dataSource.setConnectionParams(parameter); + dataSource.setCreateTime(now); + dataSource.setUpdateTime(now); + dataSourceMapper.insert(dataSource); + + putMsg(result, Status.SUCCESS); + + return result; + } + + + /** + * updateProcessInstance datasource + * + * @param loginUser + * @param name + * @param desc + * @param type + * @param parameter + * @return + */ + public Map updateDataSource(int id, User loginUser, String name, String desc, DbType type, 
String parameter) { + + Map result = new HashMap<>(); + // determine whether the data source exists + DataSource dataSource = dataSourceMapper.selectById(id); + if (dataSource == null) { + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + + //check name can use or not + if(!name.trim().equals(dataSource.getName()) && checkName(name, result)){ + return result; + } + + Boolean isConnection = checkConnection(type, parameter); + if (!isConnection) { + logger.info("connect failed, type:{}, parameter:{}", type, parameter); + putMsg(result, Status.DATASOURCE_CONNECT_FAILED); + return result; + } + Date now = new Date(); + + dataSource.setName(name.trim()); + dataSource.setNote(desc); + dataSource.setUserName(loginUser.getUserName()); + dataSource.setType(type); + dataSource.setConnectionParams(parameter); + dataSource.setUpdateTime(now); + dataSourceMapper.updateById(dataSource); + putMsg(result, Status.SUCCESS); + return result; + } + + private boolean checkName(String name, Map result) { + List queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim()); + if (queryDataSource != null && queryDataSource.size() > 0) { + putMsg(result, Status.DATASOURCE_EXIST); + return true; + } + return false; + } + + + /** + * updateProcessInstance datasource + */ + public Map queryDataSource(int id) { + + Map result = new HashMap(5); + DataSource dataSource = dataSourceMapper.selectById(id); + if (dataSource == null) { + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + // type + String dataSourceType = dataSource.getType().toString(); + // name + String dataSourceName = dataSource.getName(); + // desc + String desc = dataSource.getNote(); + // parameter + String parameter = dataSource.getConnectionParams(); + + BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter); + String database = datasourceForm.getDatabase(); + // jdbc connection params + String other = datasourceForm.getOther(); + String address 
= datasourceForm.getAddress(); + + String[] hostsPorts = getHostsAndPort(address); + // ip host + String host = hostsPorts[0]; + // prot + String port = hostsPorts[1]; + String separator = ""; + + switch (dataSource.getType()) { + case HIVE: + case SQLSERVER: + separator = ";"; + break; + case MYSQL: + case POSTGRESQL: + case CLICKHOUSE: + case ORACLE: + separator = "&"; + break; + default: + separator = "&"; + break; + } + + Map otherMap = new LinkedHashMap(); + if (other != null) { + String[] configs = other.split(separator); + for (String config : configs) { + otherMap.put(config.split("=")[0], config.split("=")[1]); + } + + } + + Map map = new HashMap<>(10); + map.put(NAME, dataSourceName); + map.put(NOTE, desc); + map.put(TYPE, dataSourceType); + map.put(HOST, host); + map.put(PORT, port); + map.put(PRINCIPAL, datasourceForm.getPrincipal()); + map.put(DATABASE, database); + map.put(USER_NAME, datasourceForm.getUser()); + map.put(PASSWORD, datasourceForm.getPassword()); + map.put(OTHER, otherMap); + result.put(Constants.DATA_LIST, map); + putMsg(result, Status.SUCCESS); + return result; + } + + + /** + * query datasource list by keyword + * + * @param loginUser + * @param searchVal + * @param pageNo + * @param pageSize + * @return + */ + public Map queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(); + IPage dataSourceList = null; + Page dataSourcePage = new Page(pageNo, pageSize); + + if (isAdmin(loginUser)) { + dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal); + }else{ + dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal); + } + + List dataSources = dataSourceList.getRecords(); + handlePasswd(dataSources); + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + pageInfo.setTotalCount((int)(dataSourceList.getTotal())); + pageInfo.setLists(dataSources); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, 
Status.SUCCESS); + + return result; + } + + /** + * get list paging + * + * @param loginUser + * @param searchVal + * @param pageSize + * @param pageInfo + * @return + */ + private List getDataSources(User loginUser, String searchVal, Integer pageSize, PageInfo pageInfo) { + IPage dataSourceList = null; + Page dataSourcePage = new Page(pageInfo.getStart(), pageSize); + + if (isAdmin(loginUser)) { + dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal); + }else{ + dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal); + } + List dataSources = dataSourceList.getRecords(); + + handlePasswd(dataSources); + return dataSources; + } + + + /** + * handle datasource connection password for safety + * @param dataSourceList + */ + private void handlePasswd(List dataSourceList) { + + for (DataSource dataSource : dataSourceList) { + + String connectionParams = dataSource.getConnectionParams(); + JSONObject object = JSONObject.parseObject(connectionParams); + object.put(org.apache.dolphinscheduler.common.Constants.PASSWORD, org.apache.dolphinscheduler.common.Constants.XXXXXX); + dataSource.setConnectionParams(JSONUtils.toJson(object)); + + } + } + + /** + * query data resource list + * + * @param loginUser + * @param type + * @return + */ + public Map queryDataSourceList(User loginUser, Integer type) { + Map result = new HashMap<>(5); + + List datasourceList; + + if (isAdmin(loginUser)) { + datasourceList = dataSourceMapper.listAllDataSourceByType(type); + }else{ + datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type); + } + + result.put(Constants.DATA_LIST, datasourceList); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * verify datasource exists + * + * @param loginUser + * @param name + * @return + */ + public Result verifyDataSourceName(User loginUser, String name) { + Result result = new Result(); + List dataSourceList = dataSourceMapper.queryDataSourceByName(name); 
+ if (dataSourceList != null && dataSourceList.size() > 0) { + logger.error("datasource name:{} has exist, can't create again.", name); + putMsg(result, Status.DATASOURCE_EXIST); + } else { + putMsg(result, Status.SUCCESS); + } + + return result; + } + + /** + * get connection + * + * @param dbType + * @param parameter + * @return + */ + private Connection getConnection(DbType dbType, String parameter) { + Connection connection = null; + BaseDataSource datasource = null; + try { + switch (dbType) { + case POSTGRESQL: + datasource = JSONObject.parseObject(parameter, PostgreDataSource.class); + Class.forName(Constants.ORG_POSTGRESQL_DRIVER); + break; + case MYSQL: + datasource = JSONObject.parseObject(parameter, MySQLDataSource.class); + Class.forName(Constants.COM_MYSQL_JDBC_DRIVER); + break; + case HIVE: + case SPARK: + if (CommonUtils.getKerberosStartupState()) { + System.setProperty(org.apache.dolphinscheduler.common.Constants.JAVA_SECURITY_KRB5_CONF, + getString(org.apache.dolphinscheduler.common.Constants.JAVA_SECURITY_KRB5_CONF_PATH)); + Configuration configuration = new Configuration(); + configuration.set(org.apache.dolphinscheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); + UserGroupInformation.setConfiguration(configuration); + UserGroupInformation.loginUserFromKeytab(getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_USERNAME), + getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH)); + } + if (dbType == DbType.HIVE){ + datasource = JSONObject.parseObject(parameter, HiveDataSource.class); + }else if (dbType == DbType.SPARK){ + datasource = JSONObject.parseObject(parameter, SparkDataSource.class); + } + Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER); + break; + case CLICKHOUSE: + datasource = JSONObject.parseObject(parameter, ClickHouseDataSource.class); + Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER); + break; + case ORACLE: + datasource = 
JSONObject.parseObject(parameter, OracleDataSource.class); + Class.forName(Constants.COM_ORACLE_JDBC_DRIVER); + break; + case SQLSERVER: + datasource = JSONObject.parseObject(parameter, SQLServerDataSource.class); + Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER); + break; + default: + break; + } + if(datasource != null){ + connection = DriverManager.getConnection(datasource.getJdbcUrl(), datasource.getUser(), datasource.getPassword()); + } + } catch (Exception e) { + logger.error(e.getMessage(),e); + } + return connection; + } + + + /** + * check connection + * + * @param type + * @param parameter + * @return + */ + public boolean checkConnection(DbType type, String parameter) { + Boolean isConnection = false; + Connection con = getConnection(type, parameter); + if (con != null) { + isConnection = true; + try { + con.close(); + } catch (SQLException e) { + logger.error("close connection fail at DataSourceService::checkConnection()", e); + } + } + return isConnection; + } + + + /** + * test connection + * + * @param loginUser + * @param id + * @return + */ + public boolean connectionTest(User loginUser, int id) { + DataSource dataSource = dataSourceMapper.selectById(id); + return checkConnection(dataSource.getType(), dataSource.getConnectionParams()); + } + + /** + * build paramters + * + * @param name + * @param desc + * @param type + * @param host + * @param port + * @param database + * @param userName + * @param password + * @param other + * @return + */ + public String buildParameter(String name, String desc, DbType type, String host, + String port, String database,String principal,String userName, + String password, String other) { + + String address = buildAddress(type, host, port); + + String jdbcUrl = address + "/" + database; + if (CommonUtils.getKerberosStartupState() && + (type == DbType.HIVE || type == DbType.SPARK)){ + jdbcUrl += ";principal=" + principal; + } + + String separator = ""; + if (Constants.MYSQL.equals(type.name()) + || 
Constants.POSTGRESQL.equals(type.name()) + || Constants.CLICKHOUSE.equals(type.name()) + || Constants.ORACLE.equals(type.name())) { + separator = "&"; + } else if (Constants.HIVE.equals(type.name()) + || Constants.SPARK.equals(type.name()) + || Constants.SQLSERVER.equals(type.name())) { + separator = ";"; + } + + Map parameterMap = new LinkedHashMap(6); + parameterMap.put(Constants.ADDRESS, address); + parameterMap.put(Constants.DATABASE, database); + parameterMap.put(Constants.JDBC_URL, jdbcUrl); + parameterMap.put(Constants.USER, userName); + parameterMap.put(Constants.PASSWORD, password); + if (CommonUtils.getKerberosStartupState() && + (type == DbType.HIVE || type == DbType.SPARK)){ + parameterMap.put(Constants.PRINCIPAL,principal); + } + if (other != null && !"".equals(other)) { + Map map = JSONObject.parseObject(other, new TypeReference>() { + }); + if (map.size() > 0) { + Set keys = map.keySet(); + StringBuilder otherSb = new StringBuilder(); + for (String key : keys) { + otherSb.append(String.format("%s=%s%s", key, map.get(key), separator)); + + } + otherSb.deleteCharAt(otherSb.length() - 1); + parameterMap.put(Constants.OTHER, otherSb); + } + + } + + if(logger.isDebugEnabled()){ + logger.info("parameters map-----" + JSONObject.toJSONString(parameterMap)); + } + return JSONObject.toJSONString(parameterMap); + + + } + + private String buildAddress(DbType type, String host, String port) { + StringBuilder sb = new StringBuilder(); + if (Constants.MYSQL.equals(type.name())) { + sb.append(Constants.JDBC_MYSQL); + sb.append(host).append(":").append(port); + } else if (Constants.POSTGRESQL.equals(type.name())) { + sb.append(Constants.JDBC_POSTGRESQL); + sb.append(host).append(":").append(port); + } else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) { + sb.append(Constants.JDBC_HIVE_2); + String[] hostArray = host.split(","); + if (hostArray.length > 0) { + for (String zkHost : hostArray) { + sb.append(String.format("%s:%s,", 
zkHost, port)); + } + sb.deleteCharAt(sb.length() - 1); + } + } else if (Constants.CLICKHOUSE.equals(type.name())) { + sb.append(Constants.JDBC_CLICKHOUSE); + sb.append(host).append(":").append(port); + } else if (Constants.ORACLE.equals(type.name())) { + sb.append(Constants.JDBC_ORACLE); + sb.append(host).append(":").append(port); + } else if (Constants.SQLSERVER.equals(type.name())) { + sb.append(Constants.JDBC_SQLSERVER); + sb.append(host).append(":").append(port); + } + + return sb.toString(); + } + + /** + * delete datasource + * + * @param loginUser + * @param datasourceId + * @return + */ + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) + public Result delete(User loginUser, int datasourceId) { + Result result = new Result(); + try { + //query datasource by id + DataSource dataSource = dataSourceMapper.selectById(datasourceId); + if(dataSource == null){ + logger.error("resource id {} not exist", datasourceId); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + if(loginUser.getId() != dataSource.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER){ + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + dataSourceMapper.deleteById(datasourceId); + datasourceUserMapper.deleteByDatasourceId(datasourceId); + putMsg(result, Status.SUCCESS); + } catch (Exception e) { + logger.error("delete datasource fail",e); + throw new RuntimeException("delete datasource fail"); + } + return result; + } + + /** + * unauthorized datasource + * + * @param loginUser + * @param userId + * @return + */ + public Map unauthDatasource(User loginUser, Integer userId) { + + Map result = new HashMap<>(); + //only admin operate + if (!isAdmin(loginUser)) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + /** + * query all data sources except userId + */ + List resultList = new ArrayList<>(); + List datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId); + Set datasourceSet = null; 
+ if (datasourceList != null && datasourceList.size() > 0) { + datasourceSet = new HashSet<>(datasourceList); + + List authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId); + + Set authedDataSourceSet = null; + if (authedDataSourceList != null && authedDataSourceList.size() > 0) { + authedDataSourceSet = new HashSet<>(authedDataSourceList); + datasourceSet.removeAll(authedDataSourceSet); + + } + resultList = new ArrayList<>(datasourceSet); + } + result.put(Constants.DATA_LIST, resultList); + putMsg(result, Status.SUCCESS); + return result; + } + + + /** + * authorized datasource + * + * @param loginUser + * @param userId + * @return + */ + public Map authedDatasource(User loginUser, Integer userId) { + Map result = new HashMap<>(5); + + if (!isAdmin(loginUser)) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + List authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId); + result.put(Constants.DATA_LIST, authedDatasourceList); + putMsg(result, Status.SUCCESS); + return result; + } + + + /** + * get host and port by address + * + * @param address + * @return + */ + private String[] getHostsAndPort(String address) { + String[] result = new String[2]; + String[] tmpArray = address.split(org.apache.dolphinscheduler.common.Constants.DOUBLE_SLASH); + String hostsAndPorts = tmpArray[tmpArray.length - 1]; + StringBuilder hosts = new StringBuilder(); + String[] hostPortArray = hostsAndPorts.split(org.apache.dolphinscheduler.common.Constants.COMMA); + String port = hostPortArray[0].split(org.apache.dolphinscheduler.common.Constants.COLON)[1]; + for (String hostPort : hostPortArray) { + hosts.append(hostPort.split(org.apache.dolphinscheduler.common.Constants.COLON)[0]).append(org.apache.dolphinscheduler.common.Constants.COMMA); + } + hosts.deleteCharAt(hosts.length() - 1); + result[0] = hosts.toString(); + result[1] = port; + return result; + } +} diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java new file mode 100644 index 0000000000..bac3a48038 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java @@ -0,0 +1,540 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + + +import org.apache.dolphinscheduler.api.enums.ExecuteType; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.text.ParseException; +import java.util.*; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * executor service + */ +@Service +public class ExecutorService extends BaseService{ + + private static final Logger logger = LoggerFactory.getLogger(ExecutorService.class); + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private ProjectService projectService; + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + @Autowired + private ProcessDefinitionService processDefinitionService; + + + @Autowired + private ProcessInstanceMapper processInstanceMapper; + + + @Autowired + private ProcessDao processDao; + + /** + * execute process instance + * + * @param loginUser login user + * @param projectName project name + * @param processDefinitionId process Definition Id + * @param cronTime cron time + * @param commandType command type + * @param failureStrategy failuer strategy + * @param startNodeList start nodelist + * @param taskDependType node dependency type + * @param warningType 
warning type + * @param warningGroupId notify group id + * @param receivers receivers + * @param receiversCc receivers cc + * @param timeout timeout + * @return + */ + public Map execProcessInstance(User loginUser, String projectName, + int processDefinitionId, String cronTime, CommandType commandType, + FailureStrategy failureStrategy, String startNodeList, + TaskDependType taskDependType, WarningType warningType, int warningGroupId, + String receivers, String receiversCc, RunMode runMode, + Priority processInstancePriority, int workerGroupId, Integer timeout) throws ParseException { + Map result = new HashMap<>(5); + // timeout is valid + if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { + putMsg(result,Status.TASK_TIMEOUT_PARAMS_ERROR); + return result; + } + Project project = projectMapper.queryByName(projectName); + Map checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project); + if (checkResultAndAuth != null){ + return checkResultAndAuth; + } + + // check process define release state + ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId); + result = checkProcessDefinitionValid(processDefinition, processDefinitionId); + if(result.get(Constants.STATUS) != Status.SUCCESS){ + return result; + } + + if (!checkTenantSuitable(processDefinition)){ + logger.error("there is not any vaild tenant for the process definition: id:{},name:{}, ", + processDefinition.getId(), processDefinition.getName()); + putMsg(result, Status.TENANT_NOT_SUITABLE); + return result; + } + + /** + * create command + */ + int create = this.createCommand(commandType, processDefinitionId, + taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), + warningGroupId, runMode,processInstancePriority, workerGroupId); + if(create > 0 ){ + /** + * according to the process definition ID updateProcessInstance and CC recipient + */ + processDefinition.setReceivers(receivers); + 
processDefinition.setReceiversCc(receiversCc); + processDefinitionMapper.updateById(processDefinition); + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); + } + return result; + } + + + + /** + * check whether the process definition can be executed + * + * @param processDefinition + * @param processDefineId + * @return + */ + public Map checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId){ + Map result = new HashMap<>(5); + if (processDefinition == null) { + // check process definition exists + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,processDefineId); + } else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { + // check process definition online + putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE,processDefineId); + }else{ + result.put(Constants.STATUS, Status.SUCCESS); + } + return result; + } + + + + /** + * do action to process instance:pause, stop, repeat, recover from pause, recover from stop + * + * @param loginUser + * @param projectName + * @param processInstanceId + * @param executeType + * @return + */ + public Map execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) { + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = checkResultAndAuth(loginUser, projectName, project); + if (checkResult != null) { + return checkResult; + } + + ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); + if (processInstance == null) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); + return result; + } + + ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); + if(executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE){ + result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId()); + if 
(result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + } + + checkResult = checkExecuteType(processInstance, executeType); + Status status = (Status) checkResult.get(Constants.STATUS); + if (status != Status.SUCCESS) { + return checkResult; + } + if (!checkTenantSuitable(processDefinition)){ + logger.error("there is not any vaild tenant for the process definition: id:{},name:{}, ", + processDefinition.getId(), processDefinition.getName()); + putMsg(result, Status.TENANT_NOT_SUITABLE); + } + + switch (executeType) { + case REPEAT_RUNNING: + result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING); + break; + case RECOVER_SUSPENDED_PROCESS: + result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS); + break; + case START_FAILURE_TASK_PROCESS: + result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS); + break; + case STOP: + if (processInstance.getState() == ExecutionStatus.READY_STOP) { + putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); + } else { + processInstance.setCommandType(CommandType.STOP); + processInstance.addHistoryCmd(CommandType.STOP); + processDao.updateProcessInstance(processInstance); + result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_STOP); + } + break; + case PAUSE: + if (processInstance.getState() == ExecutionStatus.READY_PAUSE) { + putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); + } else { + processInstance.setCommandType(CommandType.PAUSE); + processInstance.addHistoryCmd(CommandType.PAUSE); + processDao.updateProcessInstance(processInstance); + result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_PAUSE); + } + break; + default: + logger.error(String.format("unknown execute type 
: %s", executeType.toString())); + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type"); + + break; + } + return result; + } + + /** + * check tenant suitable + * @param processDefinition + * @return + */ + private boolean checkTenantSuitable(ProcessDefinition processDefinition) { + // checkTenantExists(); + Tenant tenant = processDao.getTenantForProcess(processDefinition.getTenantId(), + processDefinition.getUserId()); + if(tenant == null){ + return false; + } + return true; + } + + /** + * Check the state of process instance and the type of operation match + * + * @param processInstance + * @param executeType + * @return + */ + private Map checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) { + + Map result = new HashMap<>(5); + ExecutionStatus executionStatus = processInstance.getState(); + boolean checkResult = false; + switch (executeType) { + case PAUSE: + case STOP: + if (executionStatus.typeIsRunning()) { + checkResult = true; + } + break; + case REPEAT_RUNNING: + if (executionStatus.typeIsFinished()) { + checkResult = true; + } + break; + case START_FAILURE_TASK_PROCESS: + if (executionStatus.typeIsFailure()) { + checkResult = true; + } + break; + case RECOVER_SUSPENDED_PROCESS: + if (executionStatus.typeIsPause()|| executionStatus.typeIsCancel()) { + checkResult = true; + } + default: + break; + } + if (!checkResult) { + putMsg(result,Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString()); + } else { + putMsg(result, Status.SUCCESS); + } + return result; + } + + /** + * update process instance state + * + * @param processInstanceId + * @param executionStatus + * @return + */ + private Map updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { + Map result = new HashMap<>(5); + + int update = processDao.updateProcessInstanceState(processInstanceId, executionStatus); + if (update > 0) { + putMsg(result, 
Status.SUCCESS); + } else { + putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); + } + + return result; + } + + /** + * insert command, used in the implementation of the page, re run, recovery (pause / failure) execution + * + * @param loginUser + * @param instanceId + * @param processDefinitionId + * @param commandType + * @return + */ + private Map insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) { + Map result = new HashMap<>(5); + Command command = new Command(); + command.setCommandType(commandType); + command.setProcessDefinitionId(processDefinitionId); + command.setCommandParam(String.format("{\"%s\":%d}", + CMDPARAM_RECOVER_PROCESS_ID_STRING, instanceId)); + command.setExecutorId(loginUser.getId()); + + if(!processDao.verifyIsNeedCreateCommand(command)){ + putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND,processDefinitionId); + return result; + } + + int create = processDao.createCommand(command); + + if (create > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); + } + + return result; + } + + /** + * check if subprocesses are offline before starting process definition + * @param processDefineId + * @return + */ + public Map startCheckByProcessDefinedId(int processDefineId) { + Map result = new HashMap(); + + if (processDefineId == 0){ + logger.error("process definition id is null"); + putMsg(result,Status.REQUEST_PARAMS_NOT_VALID_ERROR,"process definition id"); + } + List ids = new ArrayList<>(); + processDao.recurseFindSubProcessId(processDefineId, ids); + Integer[] idArray = ids.toArray(new Integer[ids.size()]); + if (ids.size() > 0){ + List processDefinitionList; + processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); + if (processDefinitionList != null && processDefinitionList.size() > 0){ + for (ProcessDefinition processDefinition : processDefinitionList){ + /** + * if there is no online process, 
exit directly + */ + if (processDefinition.getReleaseState() != ReleaseState.ONLINE){ + putMsg(result,Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); + logger.info("not release process definition id: {} , name : {}", + processDefinition.getId(), processDefinition.getName()); + return result; + } + } + } + } + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * query recipients and copyers by process definition id or processInstanceId + * + * @param processDefineId + * @return + */ + public Map getReceiverCc(Integer processDefineId,Integer processInstanceId) { + Map result = new HashMap<>(); + logger.info("processInstanceId {}",processInstanceId); + if(processDefineId == null && processInstanceId == null){ + throw new RuntimeException("You must set values for parameters processDefineId or processInstanceId"); + } + if(processDefineId == null && processInstanceId != null) { + ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId); + if (processInstance == null) { + throw new RuntimeException("processInstanceId is not exists"); + } + processDefineId = processInstance.getProcessDefinitionId(); + } + ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefineId); + if (processDefinition == null){ + throw new RuntimeException(String.format("processDefineId %d is not exists",processDefineId)); + } + + String receivers = processDefinition.getReceivers(); + String receiversCc = processDefinition.getReceiversCc(); + Map dataMap = new HashMap<>(); + dataMap.put(Constants.RECEIVERS,receivers); + dataMap.put(Constants.RECEIVERS_CC,receiversCc); + + result.put(Constants.DATA_LIST, dataMap); + putMsg(result, Status.SUCCESS); + return result; + } + + + /** + * create command + * + * @param commandType + * @param processDefineId + * @param nodeDep + * @param failureStrategy + * @param startNodeList + * @param schedule + * @param warningType + * @param excutorId + * @param warningGroupId + * 
@param runMode + * @return + * @throws ParseException + */ + private int createCommand(CommandType commandType, int processDefineId, + TaskDependType nodeDep, FailureStrategy failureStrategy, + String startNodeList, String schedule, WarningType warningType, + int excutorId, int warningGroupId, + RunMode runMode,Priority processInstancePriority, int workerGroupId) throws ParseException { + + /** + * instantiate command schedule instance + */ + Command command = new Command(); + + Map cmdParam = new HashMap<>(); + if(commandType == null){ + command.setCommandType(CommandType.START_PROCESS); + }else{ + command.setCommandType(commandType); + } + command.setProcessDefinitionId(processDefineId); + if(nodeDep != null){ + command.setTaskDependType(nodeDep); + } + if(failureStrategy != null){ + command.setFailureStrategy(failureStrategy); + } + + if(StringUtils.isNotEmpty(startNodeList)){ + cmdParam.put(CMDPARAM_START_NODE_NAMES, startNodeList); + } + if(warningType != null){ + command.setWarningType(warningType); + } + command.setCommandParam(JSONUtils.toJson(cmdParam)); + command.setExecutorId(excutorId); + command.setWarningGroupId(warningGroupId); + command.setProcessInstancePriority(processInstancePriority); + command.setWorkerGroupId(workerGroupId); + + Date start = null; + Date end = null; + if(StringUtils.isNotEmpty(schedule)){ + String[] interval = schedule.split(","); + if(interval.length == 2){ + start = DateUtils.getScheduleDate(interval[0]); + end = DateUtils.getScheduleDate(interval[1]); + } + } + + if(commandType == CommandType.COMPLEMENT_DATA){ + runMode = (runMode == null) ? 
RunMode.RUN_MODE_SERIAL : runMode; + if(runMode == RunMode.RUN_MODE_SERIAL){ + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end)); + command.setCommandParam(JSONUtils.toJson(cmdParam)); + return processDao.createCommand(command); + }else if (runMode == RunMode.RUN_MODE_PARALLEL){ + int runCunt = 0; + while(!start.after(end)){ + runCunt += 1; + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start)); + command.setCommandParam(JSONUtils.toJson(cmdParam)); + processDao.createCommand(command); + start = DateUtils.getSomeDay(start, 1); + } + return runCunt; + } + }else{ + command.setCommandParam(JSONUtils.toJson(cmdParam)); + return processDao.createCommand(command); + } + + return 0; + } + + /** + * check result and auth + * + * @param loginUser + * @param projectName + * @param project + * @return + */ + private Map checkResultAndAuth(User loginUser, String projectName, Project project) { + // check project auth + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status status = (Status) checkResult.get(Constants.STATUS); + if (status != Status.SUCCESS) { + return checkResult; + } + return null; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java new file mode 100644 index 0000000000..192b4382a5 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.log.LogClient; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * log service + */ +@Service +public class LoggerService { + + private static final Logger logger = LoggerFactory.getLogger(LoggerService.class); + + @Autowired + private ProcessDao processDao; + + /** + * view log + * + * @param taskInstId + * @param skipLineNum + * @param limit + * @return + */ + public Result queryLog(int taskInstId, int skipLineNum, int limit) { + + TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); + + if (taskInstance == null){ + return new Result(Status.TASK_INSTANCE_NOT_FOUND.getCode(), Status.TASK_INSTANCE_NOT_FOUND.getMsg()); + } + + String host = taskInstance.getHost(); + if(StringUtils.isEmpty(host)){ + return new Result(Status.TASK_INSTANCE_NOT_FOUND.getCode(), Status.TASK_INSTANCE_NOT_FOUND.getMsg()); + } + + + Result result = new 
Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg()); + + logger.info("log host : {} , logPath : {} , logServer port : {}",host,taskInstance.getLogPath(),Constants.RPC_PORT); + + LogClient logClient = new LogClient(host, Constants.RPC_PORT); + String log = logClient.rollViewLog(taskInstance.getLogPath(),skipLineNum,limit); + result.setData(log); + logger.info(log); + + return result; + } + + /** + * get log size + * + * @param taskInstId + * @return + */ + public byte[] getLogBytes(int taskInstId) { + TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); + if (taskInstance == null){ + throw new RuntimeException("task instance is null"); + } + String host = taskInstance.getHost(); + LogClient logClient = new LogClient(host, Constants.RPC_PORT); + return logClient.getLogBytes(taskInstance.getLogPath()); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java new file mode 100644 index 0000000000..9b26fcef22 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.ZookeeperMonitor; +import org.apache.dolphinscheduler.common.enums.ZKNodeType; +import org.apache.dolphinscheduler.dao.MonitorDBDao; +import org.apache.dolphinscheduler.common.model.MasterServer; +import org.apache.dolphinscheduler.dao.entity.MonitorRecord; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord; +import org.springframework.stereotype.Service; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * monitor service + */ +@Service +public class MonitorService extends BaseService{ + + /** + * query database state + * + * @return + */ + public Map queryDatabaseState(User loginUser) { + Map result = new HashMap<>(5); + + List monitorRecordList = MonitorDBDao.queryDatabaseState(); + + result.put(Constants.DATA_LIST, monitorRecordList); + putMsg(result, Status.SUCCESS); + + return result; + + } + + /** + * query master list + * + * @param loginUser + * @return + */ + public Map queryMaster(User loginUser) { + + Map result = new HashMap<>(5); + + List masterServers = getServerListFromZK(true); + result.put(Constants.DATA_LIST, masterServers); + putMsg(result,Status.SUCCESS); + + return result; + } + + /** + * query zookeeper state + * + * @return + */ + public Map queryZookeeperState(User loginUser) { + Map result = new HashMap<>(5); + + List zookeeperRecordList = ZookeeperMonitor.zookeeperInfoList(); + + result.put(Constants.DATA_LIST, zookeeperRecordList); + putMsg(result, Status.SUCCESS); + + return result; + + } + + + /** + * query master list + * + * @param loginUser + * @return + */ + public Map queryWorker(User 
loginUser) { + + Map result = new HashMap<>(5); + List workerServers = getServerListFromZK(false); + + result.put(Constants.DATA_LIST, workerServers); + putMsg(result,Status.SUCCESS); + + return result; + } + + public List getServerListFromZK(boolean isMaster){ + List servers = new ArrayList<>(); + ZookeeperMonitor zookeeperMonitor = null; + try{ + zookeeperMonitor = new ZookeeperMonitor(); + ZKNodeType zkNodeType = isMaster ? ZKNodeType.MASTER : ZKNodeType.WORKER; + servers = zookeeperMonitor.getServersList(zkNodeType); + }catch (Exception e){ + throw e; + }finally { + if(zookeeperMonitor != null){ + zookeeperMonitor.close(); + } + } + return servers; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java new file mode 100644 index 0000000000..d1d564e059 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java @@ -0,0 +1,1120 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.dto.treeview.Instance; +import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.graph.DAG; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.model.TaskNodeRelation; +import org.apache.dolphinscheduler.common.process.ProcessDag; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.ProcessDao; +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.commons.lang3.ObjectUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.dolphinscheduler.api.utils.CheckUtils; +import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.dao.mapper.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.multipart.MultipartFile; + +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletResponse; +import 
java.io.BufferedOutputStream; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; + +import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID; + +/** + * process definition service + */ +@Service +public class ProcessDefinitionService extends BaseDAGService { + + private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionService.class); + + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private ProjectService projectService; + + @Autowired + private ProcessDefinitionMapper processDefineMapper; + + @Autowired + private ProcessInstanceMapper processInstanceMapper; + + + @Autowired + private TaskInstanceMapper taskInstanceMapper; + + @Autowired + private ScheduleMapper scheduleMapper; + + @Autowired + private ProcessDao processDao; + + @Autowired + private DataSourceMapper dataSourceMapper; + + @Autowired + private WorkerGroupMapper workerGroupMapper; + + /** + * create process definition + * + * @param loginUser + * @param projectName + * @param name + * @param processDefinitionJson + * @param desc + * @param locations + * @param connects + * @return + */ + public Map createProcessDefinition(User loginUser, String projectName, String name, + String processDefinitionJson, String desc, String locations, String connects) throws JsonProcessingException { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + // check project auth + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + ProcessDefinition processDefine = new ProcessDefinition(); + Date now = new Date(); + + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + Map 
checkProcessJson = checkProcessNodeList(processData, processDefinitionJson); + if (checkProcessJson.get(Constants.STATUS) != Status.SUCCESS) { + return checkProcessJson; + } + + processDefine.setName(name); + processDefine.setReleaseState(ReleaseState.OFFLINE); + processDefine.setProjectId(project.getId()); + processDefine.setUserId(loginUser.getId()); + processDefine.setProcessDefinitionJson(processDefinitionJson); + processDefine.setDescription(desc); + processDefine.setLocations(locations); + processDefine.setConnects(connects); + processDefine.setTimeout(processData.getTimeout()); + processDefine.setTenantId(processData.getTenantId()); + + //custom global params + List globalParamsList = processData.getGlobalParams(); + if (globalParamsList != null && globalParamsList.size() > 0) { + Set globalParamsSet = new HashSet<>(globalParamsList); + globalParamsList = new ArrayList<>(globalParamsSet); + processDefine.setGlobalParamList(globalParamsList); + } + processDefine.setCreateTime(now); + processDefine.setUpdateTime(now); + processDefine.setFlag(Flag.YES); + processDefineMapper.insert(processDefine); + putMsg(result, Status.SUCCESS); + result.put("processDefinitionId",processDefine.getId()); + return result; + } + + + /** + * query proccess definition list + * + * @param loginUser + * @param projectName + * @return + */ + public Map queryProccessDefinitionList(User loginUser, String projectName) { + + HashMap result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + List resourceList = processDefineMapper.queryAllDefinitionList(project.getId()); + result.put(Constants.DATA_LIST, resourceList); + putMsg(result, Status.SUCCESS); + + return result; + } + + + /** + * query proccess definition list paging + * + * 
@param loginUser + * @param projectName + * @param searchVal + * @param pageNo + * @param pageSize + * @param userId + * @return + */ + public Map queryProcessDefinitionListPaging(User loginUser, String projectName, String searchVal, Integer pageNo, Integer pageSize, Integer userId) { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + Page page = new Page(pageNo, pageSize); + IPage processDefinitionIPage = processDefineMapper.queryDefineListPaging( + page, searchVal, userId, project.getId()); + + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + pageInfo.setTotalCount((int)processDefinitionIPage.getTotal()); + pageInfo.setLists(processDefinitionIPage.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * query datail of process definition + * + * @param loginUser + * @param projectName + * @param processId + * @return + */ + public Map queryProccessDefinitionById(User loginUser, String projectName, Integer processId) { + + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + ProcessDefinition processDefinition = processDefineMapper.selectById(processId); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processId); + } else { + result.put(Constants.DATA_LIST, processDefinition); + putMsg(result, Status.SUCCESS); + } + return result; + } + + /** + * update process definition + * + * @param loginUser + * @param projectName 
+ * @param id + * @param name + * @param processDefinitionJson + * @param desc + * @param locations + * @param connects + * @return + */ + public Map updateProcessDefinition(User loginUser, String projectName, int id, String name, + String processDefinitionJson, String desc, + String locations, String connects) { + Map result = new HashMap<>(5); + + Project project = projectMapper.queryByName(projectName); + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + Map checkProcessJson = checkProcessNodeList(processData, processDefinitionJson); + if ((checkProcessJson.get(Constants.STATUS) != Status.SUCCESS)) { + return checkProcessJson; + } + ProcessDefinition processDefinition = processDao.findProcessDefineById(id); + if (processDefinition == null) { + // check process definition exists + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id); + return result; + } else if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { + // online can not permit edit + putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName()); + return result; + } else { + putMsg(result, Status.SUCCESS); + } + + ProcessDefinition processDefine = processDao.findProcessDefineById(id); + Date now = new Date(); + + processDefine.setId(id); + processDefine.setName(name); + processDefine.setReleaseState(ReleaseState.OFFLINE); + processDefine.setProjectId(project.getId()); + processDefine.setProcessDefinitionJson(processDefinitionJson); + processDefine.setDescription(desc); + processDefine.setLocations(locations); + processDefine.setConnects(connects); + processDefine.setTimeout(processData.getTimeout()); + processDefine.setTenantId(processData.getTenantId()); + + //custom global params + List globalParamsList 
= new ArrayList<>(); + if (processData.getGlobalParams() != null && processData.getGlobalParams().size() > 0) { + Set userDefParamsSet = new HashSet<>(processData.getGlobalParams()); + globalParamsList = new ArrayList<>(userDefParamsSet); + } + processDefine.setGlobalParamList(globalParamsList); + processDefine.setUpdateTime(now); + processDefine.setFlag(Flag.YES); + if (processDefineMapper.updateById(processDefine) > 0) { + putMsg(result, Status.SUCCESS); + + } else { + putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); + } + return result; + } + + /** + * verify process definition name unique + * + * @param loginUser + * @param projectName + * @param name + * @return + */ + public Map verifyProccessDefinitionName(User loginUser, String projectName, String name) { + + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(project.getId(), name); + if (processDefinition == null) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.PROCESS_INSTANCE_EXIST, name); + } + return result; + } + + /** + * delete process definition by id + * + * @param loginUser + * @param projectName + * @param processDefinitionId + * @return + */ + @Transactional(value = "TransactionManager", rollbackFor = Exception.class) + public Map deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } 
+ + ProcessDefinition processDefinition = processDefineMapper.selectById(processDefinitionId); + + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionId); + return result; + } + + // Determine if the login user is the owner of the process definition + if (loginUser.getId() != processDefinition.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + // check process definition is already online + if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { + putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE,processDefinitionId); + return result; + } + + // get the timing according to the process definition + List schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId); + if (!schedules.isEmpty() && schedules.size() > 1) { + logger.warn("scheduler num is {},Greater than 1",schedules.size()); + putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR); + return result; + }else if(schedules.size() == 1){ + Schedule schedule = schedules.get(0); + if(schedule.getReleaseState() == ReleaseState.OFFLINE){ + scheduleMapper.deleteById(schedule.getId()); + }else if(schedule.getReleaseState() == ReleaseState.ONLINE){ + putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE,schedule.getId()); + return result; + } + } + + int delete = processDefineMapper.deleteById(processDefinitionId); + + if (delete > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR); + } + return result; + } + + /** + * batch delete process definition by ids + * + * @param loginUser + * @param projectName + * @param processDefinitionIds + * @return + */ + public Map batchDeleteProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds) { + + Map result = new HashMap<>(5); + + Map deleteReuslt = new HashMap<>(5); + + List deleteFailedIdList = new ArrayList(); + Project project = 
projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + + if(StringUtils.isNotEmpty(processDefinitionIds)){ + String[] processInstanceIdArray = processDefinitionIds.split(","); + + for (String strProcessInstanceId:processInstanceIdArray) { + int processInstanceId = Integer.parseInt(strProcessInstanceId); + try { + deleteReuslt = deleteProcessDefinitionById(loginUser, projectName, processInstanceId); + if(!Status.SUCCESS.equals(deleteReuslt.get(Constants.STATUS))){ + deleteFailedIdList.add(processInstanceId); + logger.error((String)deleteReuslt.get(Constants.MSG)); + } + } catch (Exception e) { + deleteFailedIdList.add(processInstanceId); + } + } + } + + if(deleteFailedIdList.size() > 0){ + putMsg(result, Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR,StringUtils.join(deleteFailedIdList.toArray(),",")); + }else{ + putMsg(result, Status.SUCCESS); + } + return result; + } + + /** + * release process definition: online / offline + * + * @param loginUser + * @param projectName + * @param id + * @param releaseState + * @return + */ + @Transactional(value = "TransactionManager", rollbackFor = Exception.class) + public Map releaseProcessDefinition(User loginUser, String projectName, int id, int releaseState) { + HashMap result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + ReleaseState state = ReleaseState.getEnum(releaseState); + ProcessDefinition processDefinition = processDefineMapper.selectById(id); + + switch (state) { + case ONLINE: { + processDefinition.setReleaseState(state); + 
processDefineMapper.updateById(processDefinition); + break; + } + case OFFLINE: { + processDefinition.setReleaseState(state); + processDefineMapper.updateById(processDefinition); + List scheduleList = scheduleMapper.selectAllByProcessDefineArray( + new int[]{processDefinition.getId()} + ); + + for(Schedule schedule:scheduleList){ + logger.info("set schedule offline, schedule id: {}, process definition id: {}", project.getId(), schedule.getId(), id); + // set status + schedule.setReleaseState(ReleaseState.OFFLINE); + scheduleMapper.updateById(schedule); + SchedulerService.deleteSchedule(project.getId(), schedule.getId()); + } + break; + } + default: { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "releaseState"); + return result; + } + } + + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * export process definition by id + * + * @param loginUser + * @param projectName + * @param processDefinitionId + * @return + */ + public void exportProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId, HttpServletResponse response) { + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus == Status.SUCCESS) { + ProcessDefinition processDefinition = processDefineMapper.selectById(processDefinitionId); + if (processDefinition != null) { + JSONObject jsonObject = JSONUtils.parseObject(processDefinition.getProcessDefinitionJson()); + JSONArray jsonArray = (JSONArray) jsonObject.get("tasks"); + for (int i = 0; i < jsonArray.size(); i++) { + JSONObject taskNode = jsonArray.getJSONObject(i); + if (taskNode.get("type") != null && taskNode.get("type") != "") { + String taskType = taskNode.getString("type"); + if(taskType.equals(TaskType.SQL.name()) || taskType.equals(TaskType.PROCEDURE.name())){ + JSONObject sqlParameters = 
JSONUtils.parseObject(taskNode.getString("params")); + DataSource dataSource = dataSourceMapper.selectById((Integer) sqlParameters.get("datasource")); + if (dataSource != null) { + sqlParameters.put("datasourceName", dataSource.getName()); + } + taskNode.put("params", sqlParameters); + } + } + } + jsonObject.put("tasks", jsonArray); + processDefinition.setProcessDefinitionJson(jsonObject.toString()); + + Map row = new LinkedHashMap<>(); + row.put("projectName", processDefinition.getProjectName()); + row.put("processDefinitionName", processDefinition.getName()); + row.put("processDefinitionJson", processDefinition.getProcessDefinitionJson()); + row.put("processDefinitionDesc", processDefinition.getDescription()); + row.put("processDefinitionLocations", processDefinition.getLocations()); + row.put("processDefinitionConnects", processDefinition.getConnects()); + + List schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId); + if (schedules.size() > 0) { + Schedule schedule = schedules.get(0); + row.put("scheduleWarningType", schedule.getWarningType()); + row.put("scheduleWarningGroupId", schedule.getWarningGroupId()); + row.put("scheduleStartTime", schedule.getStartTime()); + row.put("scheduleEndTime", schedule.getEndTime()); + row.put("scheduleCrontab", schedule.getCrontab()); + row.put("scheduleFailureStrategy", schedule.getFailureStrategy()); + row.put("scheduleReleaseState", schedule.getReleaseState()); + row.put("scheduleProcessInstancePriority", schedule.getProcessInstancePriority()); + if(schedule.getId() == -1){ + row.put("scheduleWorkerGroupId", -1); + }else{ + WorkerGroup workerGroup = workerGroupMapper.selectById(schedule.getWorkerGroupId()); + if(workerGroup != null){ + row.put("scheduleWorkerGroupName", workerGroup.getName()); + } + } + + } + String rowsJson = JSONUtils.toJsonString(row); + response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE); + response.setHeader("Content-Disposition", 
"attachment;filename="+processDefinition.getName()+".json"); + BufferedOutputStream buff = null; + ServletOutputStream out = null; + try { + out = response.getOutputStream(); + buff = new BufferedOutputStream(out); + buff.write(rowsJson.getBytes("UTF-8")); + buff.flush(); + buff.close(); + } catch (IOException e) { + e.printStackTrace(); + }finally { + try { + buff.close(); + out.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } + } + + @Transactional(value = "TransactionManager", rollbackFor = Exception.class) + public Map importProcessDefinition(User loginUser, MultipartFile file) { + Map result = new HashMap<>(5); + + JSONObject json = null; + try(InputStreamReader inputStreamReader = new InputStreamReader( file.getInputStream(), "UTF-8" )) { + BufferedReader streamReader = new BufferedReader(inputStreamReader); + StringBuilder respomseStrBuilder = new StringBuilder(); + String inputStr = ""; + while ((inputStr = streamReader.readLine())!= null){ + respomseStrBuilder.append( inputStr ); + } + json = JSONObject.parseObject( respomseStrBuilder.toString() ); + if(json != null){ + String projectName = null; + String processDefinitionName = null; + String processDefinitionJson = null; + String processDefinitionDesc = null; + String processDefinitionLocations = null; + String processDefinitionConnects = null; + + String scheduleWarningType = null; + String scheduleWarningGroupId = null; + String scheduleStartTime = null; + String scheduleEndTime = null; + String scheduleCrontab = null; + String scheduleFailureStrategy = null; + String scheduleReleaseState = null; + String scheduleProcessInstancePriority = null; + String scheduleWorkerGroupId = null; + String scheduleWorkerGroupName = null; + + if (ObjectUtils.allNotNull(json.get("projectName"))) { + projectName = json.get("projectName").toString(); + } else { + putMsg(result, Status.DATA_IS_NULL, "processDefinitionName"); + } + if (ObjectUtils.allNotNull(json.get("processDefinitionName"))) { + 
processDefinitionName = json.get("processDefinitionName").toString(); + } else { + putMsg(result, Status.DATA_IS_NULL, "processDefinitionName"); + } + if (ObjectUtils.allNotNull(json.get("processDefinitionJson"))) { + processDefinitionJson = json.get("processDefinitionJson").toString(); + } else { + putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson"); + } + if (ObjectUtils.allNotNull(json.get("processDefinitionDesc"))) { + processDefinitionDesc = json.get("processDefinitionDesc").toString(); + } + if (ObjectUtils.allNotNull(json.get("processDefinitionLocations"))) { + processDefinitionLocations = json.get("processDefinitionLocations").toString(); + } + if (ObjectUtils.allNotNull(json.get("processDefinitionConnects"))) { + processDefinitionConnects = json.get("processDefinitionConnects").toString(); + } + + JSONObject jsonObject = JSONUtils.parseObject(processDefinitionJson); + JSONArray jsonArray = (JSONArray) jsonObject.get("tasks"); + for (int j = 0; j < jsonArray.size(); j++) { + JSONObject taskNode = jsonArray.getJSONObject(j); + JSONObject sqlParameters = JSONUtils.parseObject(taskNode.getString("params")); + List dataSources = dataSourceMapper.queryDataSourceByName(sqlParameters.getString("datasourceName")); + if (dataSources.size() > 0) { + DataSource dataSource = dataSources.get(0); + sqlParameters.put("datasource", dataSource.getId()); + } + taskNode.put("params", sqlParameters); + } + jsonObject.put("tasks", jsonArray); + + Map createProcessDefinitionResult = createProcessDefinition(loginUser,projectName,processDefinitionName,jsonObject.toString(),processDefinitionDesc,processDefinitionLocations,processDefinitionConnects); + Integer processDefinitionId = null; + if (ObjectUtils.allNotNull(createProcessDefinitionResult.get("processDefinitionId"))) { + processDefinitionId = Integer.parseInt(createProcessDefinitionResult.get("processDefinitionId").toString()); + } + if (ObjectUtils.allNotNull(json.get("scheduleCrontab")) && processDefinitionId != 
null) { + Date now = new Date(); + Schedule scheduleObj = new Schedule(); + scheduleObj.setProjectName(projectName); + scheduleObj.setProcessDefinitionId(processDefinitionId); + scheduleObj.setProcessDefinitionName(processDefinitionName); + scheduleObj.setCreateTime(now); + scheduleObj.setUpdateTime(now); + scheduleObj.setUserId(loginUser.getId()); + scheduleObj.setUserName(loginUser.getUserName()); + + scheduleCrontab = json.get("scheduleCrontab").toString(); + scheduleObj.setCrontab(scheduleCrontab); + if (ObjectUtils.allNotNull(json.get("scheduleStartTime"))) { + scheduleStartTime = json.get("scheduleStartTime").toString(); + scheduleObj.setStartTime(DateUtils.stringToDate(scheduleStartTime)); + } + if (ObjectUtils.allNotNull(json.get("scheduleEndTime"))) { + scheduleEndTime = json.get("scheduleEndTime").toString(); + scheduleObj.setEndTime(DateUtils.stringToDate(scheduleEndTime)); + } + if (ObjectUtils.allNotNull(json.get("scheduleWarningType"))) { + scheduleWarningType = json.get("scheduleWarningType").toString(); + scheduleObj.setWarningType(WarningType.valueOf(scheduleWarningType)); + } + if (ObjectUtils.allNotNull(json.get("scheduleWarningGroupId"))) { + scheduleWarningGroupId = json.get("scheduleWarningGroupId").toString(); + scheduleObj.setWarningGroupId(Integer.parseInt(scheduleWarningGroupId)); + } + if (ObjectUtils.allNotNull(json.get("scheduleFailureStrategy"))) { + scheduleFailureStrategy = json.get("scheduleFailureStrategy").toString(); + scheduleObj.setFailureStrategy(FailureStrategy.valueOf(scheduleFailureStrategy)); + } + if (ObjectUtils.allNotNull(json.get("scheduleReleaseState"))) { + scheduleReleaseState = json.get("scheduleReleaseState").toString(); + scheduleObj.setReleaseState(ReleaseState.valueOf(scheduleReleaseState)); + } + if (ObjectUtils.allNotNull(json.get("scheduleProcessInstancePriority"))) { + scheduleProcessInstancePriority = json.get("scheduleProcessInstancePriority").toString(); + 
scheduleObj.setProcessInstancePriority(Priority.valueOf(scheduleProcessInstancePriority)); + } + if (ObjectUtils.allNotNull(json.get("scheduleWorkerGroupId"))) { + scheduleWorkerGroupId = json.get("scheduleWorkerGroupId").toString(); + if(scheduleWorkerGroupId != null){ + scheduleObj.setWorkerGroupId(Integer.parseInt(scheduleWorkerGroupId)); + }else{ + if (ObjectUtils.allNotNull(json.get("scheduleWorkerGroupName"))) { + scheduleWorkerGroupName = json.get("scheduleWorkerGroupName").toString(); + List workerGroups = workerGroupMapper.queryWorkerGroupByName(scheduleWorkerGroupName); + if(workerGroups.size() > 0){ + scheduleObj.setWorkerGroupId(workerGroups.get(0).getId()); + } + } + } + } + scheduleMapper.insert(scheduleObj); + } + }else{ + putMsg(result, Status.EXPORT_PROCESS_DEFINE_BY_ID_ERROR); + return result; + } + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + putMsg(result, Status.SUCCESS); + return result; + } + + + /** + * check the process definition node meets the specifications + * + * @param processData + * @param processDefinitionJson + * @return + */ + public Map checkProcessNodeList(ProcessData processData, String processDefinitionJson) { + + Map result = new HashMap<>(5); + try { + if (processData == null) { + logger.error("process data is null"); + putMsg(result,Status.DATA_IS_NOT_VALID, processDefinitionJson); + return result; + } + + // Check whether the task node is normal + List taskNodes = processData.getTasks(); + + if (taskNodes == null) { + logger.error("process node info is empty"); + putMsg(result, Status.DATA_IS_NULL, processDefinitionJson); + return result; + } + + // check has cycle + if (graphHasCycle(taskNodes)) { + logger.error("process DAG has cycle"); + putMsg(result, Status.PROCESS_NODE_HAS_CYCLE); + return result; + } + + // check whether the process definition json is normal + for (TaskNode taskNode : taskNodes) { + if (!CheckUtils.checkTaskNodeParameters(taskNode.getParams(), 
taskNode.getType())) { + logger.error("task node {} parameter invalid", taskNode.getName()); + putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName()); + return result; + } + + // check extra params + CheckUtils.checkOtherParams(taskNode.getExtras()); + } + putMsg(result,Status.SUCCESS); + } catch (Exception e) { + result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); + result.put(Constants.MSG, e.getMessage()); + } + return result; + } + + /** + * get task node details based on process definition + */ + public Map getTaskNodeListByDefinitionId(Integer defineId) throws Exception { + Map result = new HashMap<>(); + + ProcessDefinition processDefinition = processDefineMapper.selectById(defineId); + if (processDefinition == null) { + logger.info("process define not exists"); + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinition.getId()); + return result; + } + + + String processDefinitionJson = processDefinition.getProcessDefinitionJson(); + + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + + List taskNodeList = (processData.getTasks() == null) ? 
new ArrayList<>() : processData.getTasks(); + + result.put(Constants.DATA_LIST, taskNodeList); + putMsg(result, Status.SUCCESS); + + return result; + + } + + /** + * get task node details based on process definition + */ + public Map getTaskNodeListByDefinitionIdList(String defineIdList) throws Exception { + Map result = new HashMap<>(); + + + Map> taskNodeMap = new HashMap<>(); + String[] idList = defineIdList.split(","); + List definitionIdList = Arrays.asList(idList); + List processDefinitionList = processDefineMapper.queryDefinitionListByIdList( definitionIdList.toArray(new Integer[definitionIdList.size()])); + if (processDefinitionList == null || processDefinitionList.size() ==0) { + logger.info("process definition not exists"); + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineIdList); + return result; + } + + for(ProcessDefinition processDefinition : processDefinitionList){ + String processDefinitionJson = processDefinition.getProcessDefinitionJson(); + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + List taskNodeList = (processData.getTasks() == null) ? 
new ArrayList<>() : processData.getTasks(); + taskNodeMap.put(processDefinition.getId(), taskNodeList); + } + + result.put(Constants.DATA_LIST, taskNodeMap); + putMsg(result, Status.SUCCESS); + + return result; + + } + + + /** + * query proccess definition all by project id + * + * @param projectId + * @return + */ + public Map queryProccessDefinitionAllByProjectId(Integer projectId) { + + HashMap result = new HashMap<>(5); + + List resourceList = processDefineMapper.queryAllDefinitionList(projectId); + result.put(Constants.DATA_LIST, resourceList); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * Encapsulates the TreeView structure + * + * @param processId + * @param limit + * @return + */ + public Map viewTree(Integer processId, Integer limit) throws Exception { + Map result = new HashMap<>(); + + ProcessDefinition processDefinition = processDefineMapper.selectById(processId); + if (processDefinition == null) { + logger.info("process define not exists"); + throw new RuntimeException("process define not exists"); + } + DAG dag = genDagGraph(processDefinition); + /** + * nodes that is running + */ + Map> runningNodeMap = new ConcurrentHashMap<>(); + + /** + * nodes that is waiting torun + */ + Map> waitingRunningNodeMap = new ConcurrentHashMap<>(); + + /** + * List of process instances + */ + List processInstanceList = processInstanceMapper.queryByProcessDefineId(processId, limit); + + + if (limit > processInstanceList.size()) { + limit = processInstanceList.size(); + } + + TreeViewDto parentTreeViewDto = new TreeViewDto(); + parentTreeViewDto.setName("DAG"); + parentTreeViewDto.setType(""); + // Specify the process definition, because it is a TreeView for a process definition + + for (int i = limit - 1; i >= 0; i--) { + ProcessInstance processInstance = processInstanceList.get(i); + + Date endTime = processInstance.getEndTime() == null ? 
new Date() : processInstance.getEndTime(); + parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), "", processInstance.getState().toString() + , processInstance.getStartTime(), endTime, processInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime()))); + } + + List parentTreeViewDtoList = new ArrayList<>(); + parentTreeViewDtoList.add(parentTreeViewDto); + // Here is the encapsulation task instance + for (String startNode : dag.getBeginNode()) { + runningNodeMap.put(startNode, parentTreeViewDtoList); + } + + while (Stopper.isRunning()) { + Set postNodeList = null; + Iterator>> iter = runningNodeMap.entrySet().iterator(); + while (iter.hasNext()) { + Map.Entry> en = iter.next(); + String nodeName = en.getKey(); + parentTreeViewDtoList = en.getValue(); + + TreeViewDto treeViewDto = new TreeViewDto(); + treeViewDto.setName(nodeName); + TaskNode taskNode = dag.getNode(nodeName); + treeViewDto.setType(taskNode.getType()); + + + //set treeViewDto instances + for (int i = limit - 1; i >= 0; i--) { + ProcessInstance processInstance = processInstanceList.get(i); + TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), nodeName); + if (taskInstance == null) { + treeViewDto.getInstances().add(new Instance(-1, "not running", "null")); + } else { + Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime(); + Date endTime = taskInstance.getEndTime() == null ? 
new Date() : taskInstance.getEndTime(); + + int subProcessId = 0; + /** + * if process is sub process, the return sub id, or sub id=0 + */ + if (taskInstance.getTaskType().equals(TaskType.SUB_PROCESS.name())) { + String taskJson = taskInstance.getTaskJson(); + taskNode = JSON.parseObject(taskJson, TaskNode.class); + subProcessId = Integer.parseInt(JSON.parseObject( + taskNode.getParams()).getString(CMDPARAM_SUB_PROCESS_DEFINE_ID)); + } + treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskType(), taskInstance.getState().toString() + , taskInstance.getStartTime(), taskInstance.getEndTime(), taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId)); + } + } + for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) { + pTreeViewDto.getChildren().add(treeViewDto); + } + postNodeList = dag.getSubsequentNodes(nodeName); + if (postNodeList != null && postNodeList.size() > 0) { + for (String nextNodeName : postNodeList) { + List treeViewDtoList = waitingRunningNodeMap.get(nextNodeName); + if (treeViewDtoList != null && treeViewDtoList.size() > 0) { + treeViewDtoList.add(treeViewDto); + waitingRunningNodeMap.put(nextNodeName, treeViewDtoList); + } else { + treeViewDtoList = new ArrayList<>(); + treeViewDtoList.add(treeViewDto); + waitingRunningNodeMap.put(nextNodeName, treeViewDtoList); + } + } + } + runningNodeMap.remove(nodeName); + } + + if (waitingRunningNodeMap == null || waitingRunningNodeMap.size() == 0) { + break; + } else { + runningNodeMap.putAll(waitingRunningNodeMap); + waitingRunningNodeMap.clear(); + } + } + result.put(Constants.DATA_LIST, parentTreeViewDto); + result.put(Constants.STATUS, Status.SUCCESS); + result.put(Constants.MSG, Status.SUCCESS.getMsg()); + return result; + } + + + /** + * Generate the DAG Graph based on the process definition id + * + * @param processDefinition + * @return + * @throws Exception + */ + private DAG 
genDagGraph(ProcessDefinition processDefinition) throws Exception { + + String processDefinitionJson = processDefinition.getProcessDefinitionJson(); + + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + + List taskNodeList = processData.getTasks(); + + processDefinition.setGlobalParamList(processData.getGlobalParams()); + + + List taskNodeRelations = new ArrayList<>(); + + // Traverse node information and build relationships + for (TaskNode taskNode : taskNodeList) { + String preTasks = taskNode.getPreTasks(); + List preTasksList = JSONUtils.toList(preTasks, String.class); + + // If the dependency is not empty + if (preTasksList != null) { + for (String depNode : preTasksList) { + taskNodeRelations.add(new TaskNodeRelation(depNode, taskNode.getName())); + } + } + } + + ProcessDag processDag = new ProcessDag(); + processDag.setEdges(taskNodeRelations); + processDag.setNodes(taskNodeList); + + + // Generate concrete Dag to be executed + return genDagGraph(processDag); + + + } + + /** + * Generate the DAG of process + * + * @return DAG + */ + private DAG genDagGraph(ProcessDag processDag) { + DAG dag = new DAG<>(); + + /** + * Add the ndoes + */ + if (CollectionUtils.isNotEmpty(processDag.getNodes())) { + for (TaskNode node : processDag.getNodes()) { + dag.addNode(node.getName(), node); + } + } + + /** + * Add the edges + */ + if (CollectionUtils.isNotEmpty(processDag.getEdges())) { + for (TaskNodeRelation edge : processDag.getEdges()) { + dag.addEdge(edge.getStartNode(), edge.getEndNode()); + } + } + + return dag; + } + + + /** + * whether the graph has a ring + * + * @param taskNodeResponseList + * @return + */ + private boolean graphHasCycle(List taskNodeResponseList) { + DAG graph = new DAG<>(); + + // Fill the vertices + for (TaskNode taskNodeResponse : taskNodeResponseList) { + graph.addNode(taskNodeResponse.getName(), taskNodeResponse); + } + + // Fill edge relations + for (TaskNode taskNodeResponse : 
taskNodeResponseList) { + taskNodeResponse.getPreTasks(); + List preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(),String.class); + if (CollectionUtils.isNotEmpty(preTasks)) { + for (String preTask : preTasks) { + if (!graph.addEdge(preTask, taskNodeResponse.getName())) { + return true; + } + } + } + } + + return graph.hasCycle(); + } + +} + diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java new file mode 100644 index 0000000000..e7bf3526de --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java @@ -0,0 +1,723 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.dto.gantt.GanttDto; +import org.apache.dolphinscheduler.api.dto.gantt.Task; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.graph.DAG; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.model.TaskNodeRelation; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.queue.ITaskQueue; +import org.apache.dolphinscheduler.common.queue.TaskQueueFactory; +import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; +import org.apache.dolphinscheduler.dao.ProcessDao; +import com.alibaba.fastjson.JSON; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.commons.lang3.StringUtils; +import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.dao.mapper.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.Charset; +import java.text.ParseException; +import java.util.*; +import java.util.stream.Collectors; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + 
* process instance service + */ +@Service +public class ProcessInstanceService extends BaseDAGService { + + + private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceService.class); + + @Autowired + ProjectMapper projectMapper; + + @Autowired + ProjectService projectService; + + @Autowired + ProcessDao processDao; + + @Autowired + ProcessInstanceMapper processInstanceMapper; + + @Autowired + ProcessDefinitionMapper processDefineMapper; + + @Autowired + ProcessDefinitionService processDefinitionService; + + @Autowired + ExecutorService execService; + + @Autowired + TaskInstanceMapper taskInstanceMapper; + + @Autowired + LoggerService loggerService; + + @Autowired + WorkerGroupMapper workerGroupMapper; + + /** + * query process instance by id + * + * @param loginUser + * @param projectName + * @param processId + * @return + */ + public Map queryProcessInstanceById(User loginUser, String projectName, Integer processId) { + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processId); + String workerGroupName = ""; + if(processInstance.getWorkerGroupId() == -1){ + workerGroupName = DEFAULT; + }else{ + WorkerGroup workerGroup = workerGroupMapper.selectById(processInstance.getWorkerGroupId()); + if(workerGroup != null){ + workerGroupName = DEFAULT; + }else{ + workerGroupName = workerGroup.getName(); + } + } + processInstance.setWorkerGroupName(workerGroupName); + ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); + processInstance.setReceivers(processDefinition.getReceivers()); + processInstance.setReceiversCc(processDefinition.getReceiversCc()); + 
result.put(Constants.DATA_LIST, processInstance); + putMsg(result, Status.SUCCESS); + + return result; + } + + + /** + * paging query process instance list, filtering according to project, process definition, time range, keyword, process status + * + * @param loginUser + * @param projectName + * @param processDefineId + * @param startDate + * @param endDate + * @param searchVal + * @param stateType + * @param pageNo + * @param pageSize + * @return + */ + public Map queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId, + String startDate, String endDate, + String searchVal, ExecutionStatus stateType, String host, + Integer pageNo, Integer pageSize) { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + int[] statusArray = null; + String statesStr = null; + // filter by state + if (stateType != null) { + statusArray = new int[]{stateType.ordinal()}; + } + if (statusArray != null) { + statesStr = Arrays.toString(statusArray).replace("[", "").replace("]", ""); + } + + Date start = null; + Date end = null; + try { + if (StringUtils.isNotEmpty(startDate)) { + start = DateUtils.getScheduleDate(startDate); + } + if (StringUtils.isNotEmpty(endDate)) { + end = DateUtils.getScheduleDate(endDate); + } + } catch (Exception e) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate"); + return result; + } + Page page = new Page(pageNo, pageSize); + + IPage processInstanceList = + processInstanceMapper.queryProcessInstanceListPaging(page, + project.getId(), processDefineId, searchVal, statusArray, host, start, end); + + Set exclusionSet = new HashSet(){{ + add(Constants.CLASS); + add("locations"); + add("connects"); + add("processInstanceJson"); + }}; + + PageInfo 
pageInfo = new PageInfo(pageNo, pageSize); + pageInfo.setTotalCount((int)processInstanceList.getTotal()); + pageInfo.setLists(CollectionUtils.getListByExclusion(processInstanceList.getRecords(), exclusionSet)); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + return result; + } + + + + /** + * query task list by process instance id + * + * @param loginUser + * @param projectName + * @param processId + * @return + */ + public Map queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processId); + List taskInstanceList = processDao.findValidTaskListByProcessId(processId); + AddDependResultForTaskList(taskInstanceList); + Map resultMap = new HashMap<>(); + resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); + resultMap.put(TASK_LIST, taskInstanceList); + result.put(Constants.DATA_LIST, resultMap); + + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * add dependent result for dependent task + * @param taskInstanceList + */ + private void AddDependResultForTaskList(List taskInstanceList) throws IOException { + for(TaskInstance taskInstance: taskInstanceList){ + if(taskInstance.getTaskType().toUpperCase().equals(TaskType.DEPENDENT.toString())){ + Result logResult = loggerService.queryLog( + taskInstance.getId(), 0, 4098); + if(logResult.getCode() == Status.SUCCESS.ordinal()){ + String log = (String) logResult.getData(); + Map resultMap = parseLogForDependentResult(log); + taskInstance.setDependentResult(JSONUtils.toJson(resultMap)); + } + } + } + } + + public Map 
parseLogForDependentResult(String log) throws IOException { + Map resultMap = new HashMap<>(); + if(StringUtils.isEmpty(log)){ + return resultMap; + } + + BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(Charset.forName("utf8"))), Charset.forName("utf8"))); + String line; + while ((line = br.readLine()) != null) { + if(line.contains(DEPENDENT_SPLIT)){ + String[] tmpStringArray = line.split(":\\|\\|"); + if(tmpStringArray.length != 2){ + continue; + } + String dependResultString = tmpStringArray[1]; + String[] dependStringArray = dependResultString.split(","); + if(dependStringArray.length != 2){ + continue; + } + String key = dependStringArray[0].trim(); + DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim()); + resultMap.put(key, dependResult); + } + } + return resultMap; + } + + + /** + * query sub process instance detail info by task id + * + * @param loginUser + * @param projectName + * @param taskId + * @return + */ + public Map querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + TaskInstance taskInstance = processDao.findTaskInstanceById(taskId); + if (taskInstance == null) { + putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); + return result; + } + if (!taskInstance.isSubProcess()) { + putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName()); + return result; + } + + ProcessInstance subWorkflowInstance = processDao.findSubProcessInstance( + taskInstance.getProcessInstanceId(), taskInstance.getId()); + if (subWorkflowInstance == null) { + putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId); + 
return result; + } + Map dataMap = new HashMap<>(); + dataMap.put("subProcessInstanceId", subWorkflowInstance.getId()); + result.put(Constants.DATA_LIST, dataMap); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * update process instance + * + * @param loginUser + * @param projectName + * @param processInstanceId + * @param processInstanceJson + * @param scheduleTime + * @param syncDefine + * @param flag + * @param locations + * @param connects + * @return + */ + public Map updateProcessInstance(User loginUser, String projectName, Integer processInstanceId, + String processInstanceJson, String scheduleTime, Boolean syncDefine, + Flag flag, String locations, String connects) throws ParseException { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + //check project permission + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + //check process instance exists + ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); + if (processInstance == null) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); + return result; + } + + //check process instance status + if (!processInstance.getState().typeIsFinished()) { + putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, + processInstance.getName(), processInstance.getState().toString(), "update"); + return result; + } + Date schedule = null; + if (scheduleTime != null) { + schedule = DateUtils.getScheduleDate(scheduleTime); + } else { + schedule = processInstance.getScheduleTime(); + } + processInstance.setScheduleTime(schedule); + processInstance.setLocations(locations); + processInstance.setConnects(connects); + String globalParams = null; + String originDefParams = null; + int timeout = processInstance.getTimeout(); + 
ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); + if (StringUtils.isNotEmpty(processInstanceJson)) { + ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class); + //check workflow json is valid + Map checkFlowJson = processDefinitionService.checkProcessNodeList(processData, processInstanceJson); + if (checkFlowJson.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + + originDefParams = JSONUtils.toJson(processData.getGlobalParams()); + List globalParamList = processData.getGlobalParams(); + Map globalParamMap = globalParamList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); + globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, + processInstance.getCmdTypeIfComplement(), schedule); + timeout = processData.getTimeout(); + processInstance.setTimeout(timeout); + Tenant tenant = processDao.getTenantForProcess(processData.getTenantId(), + processDefinition.getUserId()); + if(tenant != null){ + processInstance.setTenantCode(tenant.getTenantCode()); + } + processInstance.setProcessInstanceJson(processInstanceJson); + processInstance.setGlobalParams(globalParams); + } +// int update = processDao.updateProcessInstance(processInstanceId, processInstanceJson, +// globalParams, schedule, flag, locations, connects); + int update = processDao.updateProcessInstance(processInstance); + int updateDefine = 1; + if (syncDefine && StringUtils.isNotEmpty(processInstanceJson)) { + processDefinition.setProcessDefinitionJson(processInstanceJson); + processDefinition.setGlobalParams(originDefParams); + processDefinition.setLocations(locations); + processDefinition.setConnects(connects); + processDefinition.setTimeout(timeout); + updateDefine = processDefineMapper.updateById(processDefinition); + } + if (update > 0 && updateDefine > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, 
Status.UPDATE_PROCESS_INSTANCE_ERROR); + } + + + return result; + + } + + /** + * query parent process instance detail info by sub process instance id + * + * @param loginUser + * @param projectName + * @param subId + * @return + */ + public Map queryParentInstanceBySubId(User loginUser, String projectName, Integer subId) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + ProcessInstance subInstance = processDao.findProcessInstanceDetailById(subId); + if (subInstance == null) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId); + return result; + } + if (subInstance.getIsSubProcess() == Flag.NO) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName()); + return result; + } + + ProcessInstance parentWorkflowInstance = processDao.findParentProcessInstance(subId); + if (parentWorkflowInstance == null) { + putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST); + return result; + } + Map dataMap = new HashMap<>(); + dataMap.put("parentWorkflowInstance", parentWorkflowInstance.getId()); + result.put(Constants.DATA_LIST, dataMap); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * delete process instance by id, at the same time,delete task instance and their mapping relation data + * @param loginUser + * @param projectName + * @param processInstanceId + * @param tasksQueue + * @return + */ + public Map deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId,ITaskQueue tasksQueue) { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) 
checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); + List taskInstanceList = processDao.findValidTaskListByProcessId(processInstanceId); + //process instance priority + int processInstancePriority = processInstance.getProcessInstancePriority().ordinal(); + if (processInstance == null) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); + return result; + } + + // delete zk queue + if (CollectionUtils.isNotEmpty(taskInstanceList)){ + for (TaskInstance taskInstance : taskInstanceList){ + // task instance priority + int taskInstancePriority = taskInstance.getTaskInstancePriority().ordinal(); + + StringBuilder nodeValueSb = new StringBuilder(100); + nodeValueSb.append(processInstancePriority) + .append(UNDERLINE) + .append(processInstanceId) + .append(UNDERLINE) + .append(taskInstancePriority) + .append(UNDERLINE) + .append(taskInstance.getId()) + .append(UNDERLINE); + + int taskWorkerGroupId = processDao.getTaskWorkerGroupId(taskInstance); + WorkerGroup workerGroup = workerGroupMapper.selectById(taskWorkerGroupId); + + if(workerGroup == null){ + nodeValueSb.append(DEFAULT_WORKER_ID); + }else { + + String ips = workerGroup.getIpList(); + StringBuilder ipSb = new StringBuilder(100); + String[] ipArray = ips.split(COMMA); + + for (String ip : ipArray) { + long ipLong = IpUtils.ipToLong(ip); + ipSb.append(ipLong).append(COMMA); + } + + if(ipSb.length() > 0) { + ipSb.deleteCharAt(ipSb.length() - 1); + } + nodeValueSb.append(ipSb); + } + + try { + logger.info("delete task queue node : {}",nodeValueSb.toString()); + tasksQueue.removeNode(org.apache.dolphinscheduler.common.Constants.SCHEDULER_TASKS_QUEUE, nodeValueSb.toString()); + }catch (Exception e){ + logger.error("delete task queue node : {}", nodeValueSb.toString()); + } + } + } + + // delete database cascade + int delete = 
processDao.deleteWorkProcessInstanceById(processInstanceId); + processDao.deleteAllSubWorkProcessByParentId(processInstanceId); + processDao.deleteWorkProcessMapByParentId(processInstanceId); + + if (delete > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR); + } + + return result; + } + + /** + * batch delete process instance by ids, at the same time,delete task instance and their mapping relation data + * + * @param loginUser + * @param projectName + * @param processInstanceIds + * @return + */ + public Map batchDeleteProcessInstanceByIds(User loginUser, String projectName, String processInstanceIds) { + // task queue + ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance(); + + Map result = new HashMap<>(5); + List deleteFailedIdList = new ArrayList(); + + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + if(StringUtils.isNotEmpty(processInstanceIds)){ + String[] processInstanceIdArray = processInstanceIds.split(","); + + for (String strProcessInstanceId:processInstanceIdArray) { + int processInstanceId = Integer.parseInt(strProcessInstanceId); + try { + deleteProcessInstanceById(loginUser, projectName, processInstanceId,tasksQueue); + } catch (Exception e) { + deleteFailedIdList.add(processInstanceId); + } + } + } + if(deleteFailedIdList.size() > 0){ + putMsg(result, Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR,StringUtils.join(deleteFailedIdList.toArray(),",")); + }else{ + putMsg(result, Status.SUCCESS); + } + + return result; + } + + /** + * view process instance variables + * + * @param processInstanceId + * @return + */ + public Map viewVariables( Integer processInstanceId) throws Exception { + Map result = new HashMap<>(5); + + ProcessInstance 
processInstance = processInstanceMapper.queryDetailById(processInstanceId); + + if (processInstance == null) { + throw new RuntimeException("workflow instance is null"); + } + + Map timeParams = BusinessTimeUtils + .getBusinessTime(processInstance.getCmdTypeIfComplement(), + processInstance.getScheduleTime()); + + + String workflowInstanceJson = processInstance.getProcessInstanceJson(); + + ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class); + + String userDefinedParams = processInstance.getGlobalParams(); + + // global params + List globalParams = new ArrayList<>(); + + if (userDefinedParams != null && userDefinedParams.length() > 0) { + globalParams = JSON.parseArray(userDefinedParams, Property.class); + } + + + List taskNodeList = workflowData.getTasks(); + + // global param string + String globalParamStr = JSON.toJSONString(globalParams); + globalParamStr = ParameterUtils.convertParameterPlaceholders(globalParamStr, timeParams); + globalParams = JSON.parseArray(globalParamStr, Property.class); + for (Property property : globalParams) { + timeParams.put(property.getProp(), property.getValue()); + } + + // local params + Map> localUserDefParams = new HashMap<>(); + for (TaskNode taskNode : taskNodeList) { + String parameter = taskNode.getParams(); + Map map = JSONUtils.toMap(parameter); + String localParams = map.get(LOCAL_PARAMS); + if (localParams != null && !localParams.isEmpty()) { + localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams); + List localParamsList = JSON.parseArray(localParams, Property.class); + Map localParamsMap = new HashMap<>(); + localParamsMap.put("taskType",taskNode.getType()); + localParamsMap.put("localParamsList",localParamsList); + if (localParamsList.size() > 0) { + localUserDefParams.put(taskNode.getName(), localParamsMap); + } + } + + } + + Map resultMap = new HashMap<>(); + + resultMap.put(GLOBAL_PARAMS, globalParams); + resultMap.put(LOCAL_PARAMS, 
localUserDefParams); + + result.put(Constants.DATA_LIST, resultMap); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * encapsulation gantt structure + * + * @param processInstanceId + * @return + * @throws Exception + */ + public Map viewGantt(Integer processInstanceId) throws Exception { + Map result = new HashMap<>(); + + ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); + + if (processInstance == null) { + throw new RuntimeException("workflow instance is null"); + } + + GanttDto ganttDto = new GanttDto(); + + DAG dag = processInstance2DAG(processInstance); + //topological sort + List nodeList = dag.topologicalSort(); + + ganttDto.setTaskNames(nodeList); + + List taskList = new ArrayList<>(); + for (String node : nodeList) { + TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstanceId, node); + if (taskInstance == null) { + continue; + } + Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime(); + Date endTime = taskInstance.getEndTime() == null ? 
new Date() : taskInstance.getEndTime(); + Task task = new Task(); + task.setTaskName(taskInstance.getName()); + task.getStartDate().add(startTime.getTime()); + task.getEndDate().add(endTime.getTime()); + task.setIsoStart(startTime); + task.setIsoEnd(endTime); + task.setStatus(taskInstance.getState().toString()); + task.setExecutionDate(taskInstance.getStartTime()); + task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime())); + taskList.add(task); + } + ganttDto.setTasks(taskList); + + result.put(Constants.DATA_LIST, ganttDto); + putMsg(result, Status.SUCCESS); + return result; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java new file mode 100644 index 0000000000..9f6a2d2578 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java @@ -0,0 +1,396 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.ProjectUser; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.*; + +import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc; + +/** + * project service + * +**/ +@Service +public class ProjectService extends BaseService{ + + private static final Logger logger = LoggerFactory.getLogger(ProjectService.class); + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private ProjectUserMapper projectUserMapper; + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + /** + * create project + * + * @param loginUser + * @param name + * @param desc + * @return + */ + public Map createProject(User loginUser, String name, String desc) { + + Map result = new HashMap<>(5); + Map descCheck = checkDesc(desc); + if (descCheck.get(Constants.STATUS) != Status.SUCCESS) { + return descCheck; + } + + Project project = projectMapper.queryByName(name); + if (project != null) { + putMsg(result, Status.PROJECT_ALREADY_EXISTS, name); + return result; + } + project = new 
Project(); + Date now = new Date(); + + project.setName(name); + project.setDesc(desc); + project.setUserId(loginUser.getId()); + project.setUserName(loginUser.getUserName()); + project.setCreateTime(now); + project.setUpdateTime(now); + + if (projectMapper.insert(project) > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.CREATE_PROJECT_ERROR); + } + return result; + } + + /** + * query project details by id + * + * @param projectId + * @return + */ + public Map queryById(Integer projectId) { + + Map result = new HashMap<>(5); + Project project = projectMapper.selectById(projectId); + + if (project != null) { + result.put(Constants.DATA_LIST, project); + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.PROJECT_NOT_FOUNT, projectId); + } + return result; + } + + /** + * check project and authorization + * check the login user's permission on the project + * + * @param loginUser + * @param project + * @param projectName + * @return + */ + public Map checkProjectAndAuth(User loginUser, Project project, String projectName) { + + Map result = new HashMap<>(5); + + if (project == null) { + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + } else if (!checkReadPermission(loginUser, project)) { + // check read permission + putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName); + }else { + putMsg(result, Status.SUCCESS); + } + + + return result; + } + + /** + * admin can view all projects + * if the login user is an admin, all projects are visible + * + * @param loginUser + * @param pageSize + * @param pageNo + * @param searchVal + * @return + */ + public Map queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) { + Map result = new HashMap<>(); + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + + Page page = new Page(pageNo, pageSize); + + int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 
0 : loginUser.getId(); + IPage projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal); + + List projectList = projectIPage.getRecords(); + if(userId != 0){ + for (Project project : projectList) { + project.setPerm(org.apache.dolphinscheduler.common.Constants.DEFAULT_ADMIN_PERMISSION); + } + } + pageInfo.setTotalCount((int)projectIPage.getTotal()); + pageInfo.setLists(projectList); + result.put(Constants.COUNT, (int)projectIPage.getTotal()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * delete project by id + * + * @param loginUser + * @param projectId + * @return + */ + public Map deleteProject(User loginUser, Integer projectId) { + Map result = new HashMap<>(5); + Project project = projectMapper.selectById(projectId); + Map checkResult = getCheckResult(loginUser, project); + if (checkResult != null) { + return checkResult; + } + List processDefinitionList = processDefinitionMapper.queryAllDefinitionList(projectId); + + if(processDefinitionList.size() > 0){ + putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL); + return result; + } + + int delete = projectMapper.deleteById(projectId); + if (delete > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.DELETE_PROJECT_ERROR); + } + return result; + } + + /** + * get check result + * + * @param loginUser + * @param project + * @return + */ + private Map getCheckResult(User loginUser, Project project) { + Map checkResult = checkProjectAndAuth(loginUser, project, project.getName()); + Status status = (Status) checkResult.get(Constants.STATUS); + if (status != Status.SUCCESS) { + return checkResult; + } + return null; + } + + /** + * updateProcessInstance project + * + * @param loginUser + * @param projectId + * @param projectName + * @param desc + * @return + */ + public Map update(User loginUser, Integer projectId, String projectName, String desc) { + Map result = new HashMap<>(5); + + Project project 
= projectMapper.selectById(projectId); + Map checkResult = getCheckResult(loginUser, project); + if (checkResult != null) { + return checkResult; + } + Project tempProject = projectMapper.queryByName(projectName); + if (tempProject != null && tempProject.getId() != projectId) { + putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName); + return result; + } + project.setName(projectName); + project.setDesc(desc); + project.setUpdateTime(new Date()); + + int update = projectMapper.updateById(project); + if (update > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.UPDATE_PROJECT_ERROR); + } + return result; + } + + + /** + * query unauthorized project + * + * @param loginUser + * @param userId + * @return + */ + public Map queryUnauthorizedProject(User loginUser, Integer userId) { + Map result = new HashMap<>(5); + if (checkAdmin(loginUser, result)) { + return result; + } + /** + * query all project list except specified userId + */ + List projectList = projectMapper.queryProjectExceptUserId(userId); + List resultList = new ArrayList<>(); + Set projectSet = null; + if (projectList != null && projectList.size() > 0) { + projectSet = new HashSet<>(projectList); + + List authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId); + + resultList = getUnauthorizedProjects(projectSet, authedProjectList); + } + result.put(Constants.DATA_LIST, resultList); + putMsg(result,Status.SUCCESS); + return result; + } + + /** + * get unauthorized project + * + * @param projectSet + * @param authedProjectList + * @return + */ + private List getUnauthorizedProjects(Set projectSet, List authedProjectList) { + List resultList; + Set authedProjectSet = null; + if (authedProjectList != null && authedProjectList.size() > 0) { + authedProjectSet = new HashSet<>(authedProjectList); + projectSet.removeAll(authedProjectSet); + + } + resultList = new ArrayList<>(projectSet); + return resultList; + } + + + /** + * query authorized project + * + * @param 
loginUser + * @param userId + * @return + */ + public Map queryAuthorizedProject(User loginUser, Integer userId) { + Map result = new HashMap<>(); + + if (checkAdmin(loginUser, result)) { + return result; + } + + List projects = projectMapper.queryAuthedProjectListByUserId(userId); + result.put(Constants.DATA_LIST, projects); + putMsg(result,Status.SUCCESS); + + return result; + } + + + /** + * check whether have read permission + * + * @param user + * @param project + * @return + */ + private boolean checkReadPermission(User user, Project project) { + int permissionId = queryPermission(user, project); + return (permissionId & org.apache.dolphinscheduler.common.Constants.READ_PERMISSION) != 0; + } + + /** + * query permission id + * + * @param user + * @param project + * @return + */ + private int queryPermission(User user, Project project) { + if (user.getUserType() == UserType.ADMIN_USER) { + return org.apache.dolphinscheduler.common.Constants.READ_PERMISSION; + } + + if (project.getUserId() == user.getId()) { + return org.apache.dolphinscheduler.common.Constants.ALL_PERMISSIONS; + } + + ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId()); + + if (projectUser == null) { + return 0; + } + + return projectUser.getPerm(); + + } + + /** + * query all project list that have one or more process definitions. 
+ * @return + */ + public Map queryAllProjectList() { + Map result = new HashMap<>(); + List projects = projectMapper.selectList(null); + List processDefinitions = processDefinitionMapper.selectList(null); + if(projects != null){ + Set set = new HashSet<>(); + for (ProcessDefinition processDefinition : processDefinitions){ + set.add(processDefinition.getProjectId()); + } + List tempDeletelist = new ArrayList(); + for (Project project : projects) { + if(!set.contains(project.getId())){ + tempDeletelist.add(project); + } + } + projects.removeAll(tempDeletelist); + } + result.put(Constants.DATA_LIST, projects); + putMsg(result,Status.SUCCESS); + return result; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java new file mode 100644 index 0000000000..7ffcd02fcb --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java @@ -0,0 +1,259 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.Queue; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.QueueMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * queue service + */ +@Service +public class QueueService extends BaseService { + + private static final Logger logger = LoggerFactory.getLogger(TenantService.class); + + @Autowired + private QueueMapper queueMapper; + + /** + * query queue list + * + * @param loginUser + * @return + */ + public Map queryList(User loginUser) { + Map result = new HashMap<>(5); + if (checkAdmin(loginUser, result)) { + return result; + } + + List queueList = queueMapper.selectList(null); + result.put(Constants.DATA_LIST, queueList); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * query queue list paging + * + * @param loginUser + * @param searchVal + * @param pageNo + * @param pageSize + * @return + */ + public Map queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(5); + if (checkAdmin(loginUser, result)) { + return result; + } + + Page page = new Page(pageNo, pageSize); + + + IPage queueList = queueMapper.queryQueuePaging(page, searchVal); + + Integer count = (int)queueList.getTotal(); + PageInfo pageInfo = new PageInfo<>(pageNo, 
pageSize); + pageInfo.setTotalCount(count); + pageInfo.setLists(queueList.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * create queue + * + * @param loginUser + * @param queue + * @param queueName + * @return + */ + public Map createQueue(User loginUser, String queue, String queueName) { + Map result = new HashMap<>(5); + if (checkAdmin(loginUser, result)) { + return result; + } + + if(StringUtils.isEmpty(queue)){ + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queue); + return result; + } + + if(StringUtils.isEmpty(queueName)){ + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queueName); + return result; + } + + if (checkQueueNameExist(queueName)) { + putMsg(result, Status.QUEUE_NAME_EXIST, queueName); + return result; + } + + if (checkQueueExist(queue)) { + putMsg(result, Status.QUEUE_VALUE_EXIST, queue); + return result; + } + + Queue queueObj = new Queue(); + Date now = new Date(); + + queueObj.setQueue(queue); + queueObj.setQueueName(queueName); + queueObj.setCreateTime(now); + queueObj.setUpdateTime(now); + + queueMapper.insert(queueObj); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * update queue + * + * @param loginUser + * @param id + * @param queue + * @param queueName + * @return + */ + public Map updateQueue(User loginUser, int id, String queue, String queueName) { + Map result = new HashMap<>(5); + if (checkAdmin(loginUser, result)) { + return result; + } + + Queue queueObj = queueMapper.selectById(id); + if (queueObj == null) { + putMsg(result, Status.QUEUE_NOT_EXIST, id); + return result; + } + + // whether queue value or queueName is changed + if (queue.equals(queueObj.getQueue()) && queueName.equals(queueObj.getQueueName())) { + putMsg(result, Status.NEED_NOT_UPDATE_QUEUE); + return result; + } + + // check queue name is exist + if (!queueName.equals(queueObj.getQueueName())) { + if(checkQueueNameExist(queueName)){ + putMsg(result, 
Status.QUEUE_NAME_EXIST, queueName); + return result; + } + } + + // check queue value is exist + if (!queue.equals(queueObj.getQueue())) { + if(checkQueueExist(queue)){ + putMsg(result, Status.QUEUE_VALUE_EXIST, queue); + return result; + } + } + + // update queue + Date now = new Date(); + queueObj.setQueue(queue); + queueObj.setQueueName(queueName); + queueObj.setUpdateTime(now); + + queueMapper.updateById(queueObj); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * verify queue and queueName + * + * @param queue + * @param queueName + * @return + */ + public Result verifyQueue(String queue, String queueName) { + Result result=new Result(); + + if (StringUtils.isEmpty(queue)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queue); + return result; + } + + if (StringUtils.isEmpty(queueName)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queueName); + return result; + } + + + if(checkQueueNameExist(queueName)){ + logger.error("queue name {} has exist, can't create again.", queueName); + putMsg(result, Status.QUEUE_NAME_EXIST, queueName); + return result; + } + + if(checkQueueExist(queue)){ + logger.error("queue value {} has exist, can't create again.", queue); + putMsg(result, Status.QUEUE_VALUE_EXIST, queue); + return result; + } + + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * check queue exist + * + * @param queue + * @return + */ + private boolean checkQueueExist(String queue) { + return queueMapper.queryAllQueueList(queue, null).size()>0 ? false : true; + } + + /** + * check queue name exist + * + * @param queueName + * @return + */ + private boolean checkQueueNameExist(String queueName) { + return queueMapper.queryAllQueueList(null ,queueName).size()>0 ? 
false : true; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java new file mode 100644 index 0000000000..9764d3a386 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java @@ -0,0 +1,897 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.UdfFunc; +import org.apache.dolphinscheduler.dao.entity.User; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.commons.collections.BeanMap; +import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.dao.mapper.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.multipart.MultipartFile; + +import java.text.MessageFormat; +import java.util.*; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * resources service + */ +@Service +public class ResourcesService extends BaseService { + + private static final Logger logger = LoggerFactory.getLogger(ResourcesService.class); + + @Autowired + private ResourceMapper resourcesMapper; + + @Autowired + private UdfFuncMapper udfFunctionMapper; + + @Autowired + private TenantMapper tenantMapper; + + @Autowired + private UserMapper userMapper; + + @Autowired + private ResourceUserMapper resourceUserMapper; + + /** + * create resource + * + * @param 
loginUser + * @param type + * @param name + * @param desc + * @param file + * @return + */ + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) + public Result createResource(User loginUser, + String name, + String desc, + ResourceType type, + MultipartFile file) { + Result result = new Result(); + + // if hdfs not startup + if (!PropertyUtils.getResUploadStartupState()){ + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + // file is empty + if (file.isEmpty()) { + logger.error("file is empty: {}", file.getOriginalFilename()); + putMsg(result, Status.RESOURCE_FILE_IS_EMPTY); + return result; + } + + // file suffix + String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); + String nameSuffix = FileUtils.suffix(name); + + // determine file suffix + if (!StringUtils.equals(fileSuffix, nameSuffix)) { + /** + * rename file suffix and original suffix must be consistent + * 重命名的后缀必须与原文件后缀一致 + */ + logger.error("rename file suffix and original suffix must be consistent: {}", file.getOriginalFilename()); + putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE); + return result; + } + // + //If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar + if (Constants.UDF.equals(type.name())) { + if (!JAR.equalsIgnoreCase(fileSuffix)) { + logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg()); + putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR); + return result; + } + } + if (file.getSize() > Constants.maxFileSize) { + logger.error("file size is too large: {}", file.getOriginalFilename()); + putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT); + return result; + } + + // check resoure name exists + if (checkResourceExists(name, 0, type.ordinal())) { + logger.error("resource {} has exist, can't recreate", name); + putMsg(result, Status.RESOURCE_EXIST); + return result; + } + + Date now = new Date(); 
+ + Resource resource = new Resource(name,file.getOriginalFilename(),desc,loginUser.getId(),type,file.getSize(),now,now); + + try { + resourcesMapper.insert(resource); + + putMsg(result, Status.SUCCESS); + Map dataMap = new BeanMap(resource); + Map resultMap = new HashMap(); + for (Object key : dataMap.keySet()) { + if (!"class".equalsIgnoreCase(key.toString())) { + resultMap.put(key.toString(), dataMap.get(key)); + } + } + result.setData(resultMap); + } catch (Exception e) { + logger.error("resource already exists, can't recreate ", e); + putMsg(result, Status.CREATE_RESOURCE_ERROR); + return result; + } + + // fail upload + if (!upload(loginUser, name, file, type)) { + logger.error("upload resource: {} file: {} failed.", name, file.getOriginalFilename()); + putMsg(result, Status.HDFS_OPERATION_ERROR); + throw new RuntimeException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); + } + return result; + } + + private boolean checkResourceExists(String alias, int userId, int type ){ + + List resources = resourcesMapper.queryResourceList(alias, userId, type); + if (resources != null && resources.size() > 0) { + return true; + } + return false; + } + + + + /** + * update resource + * + * @param loginUser + * @param type + * @param name + * @param desc + * @return + */ + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) + public Result updateResource(User loginUser, + int resourceId, + String name, + String desc, + ResourceType type) { + Result result = new Result(); + + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()){ + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + + Resource resource = resourcesMapper.selectById(resourceId); + String originResourceName = resource.getAlias(); + if (resource == null) { + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; 
+ } + if (loginUser.getId() != resource.getUserId()) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + + if (name.equals(resource.getAlias()) && desc.equals(resource.getDesc())) { + putMsg(result, Status.SUCCESS); + return result; + } + + //check resource aleady exists + if (!resource.getAlias().equals(name)) { + if (checkResourceExists(name, 0, type.ordinal())) { + logger.error("resource {} already exists, can't recreate", name); + putMsg(result, Status.RESOURCE_EXIST); + return result; + } + } + + // updateProcessInstance data + Date now = new Date(); + resource.setAlias(name); + resource.setDesc(desc); + resource.setUpdateTime(now); + + try { + resourcesMapper.updateById(resource); + + putMsg(result, Status.SUCCESS); + Map dataMap = new BeanMap(resource); + Map resultMap = new HashMap<>(5); + for (Object key : dataMap.keySet()) { + if (!Constants.CLASS.equalsIgnoreCase(key.toString())) { + resultMap.put(key.toString(), dataMap.get(key)); + } + } + result.setData(resultMap); + } catch (Exception e) { + logger.error(Status.UPDATE_RESOURCE_ERROR.getMsg(), e); + putMsg(result, Status.UPDATE_RESOURCE_ERROR); + return result; + } + // if name unchanged, return directly without moving on HDFS + if (originResourceName.equals(name)) { + return result; + } + + // hdfs move + // query tenant by user id + User user = userMapper.queryDetailsById(resource.getUserId()); + String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); + // get file hdfs path + // delete hdfs file by type + String originHdfsFileName = ""; + String destHdfsFileName = ""; + if (resource.getType().equals(ResourceType.FILE)) { + originHdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, originResourceName); + destHdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, name); + } else if (resource.getType().equals(ResourceType.UDF)) { + originHdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, originResourceName); + destHdfsFileName = 
HadoopUtils.getHdfsUdfFilename(tenantCode, name); + } + try { + if (HadoopUtils.getInstance().exists(originHdfsFileName)) { + logger.info("hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName); + HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true); + } else { + logger.error("{} not exist", originHdfsFileName); + putMsg(result,Status.RESOURCE_NOT_EXIST); + } + } catch (Exception e) { + logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e); + putMsg(result,Status.HDFS_COPY_FAIL); + } + + return result; + + } + + /** + * query resources list paging + * + * @param loginUser + * @param type + * @param searchVal + * @param pageNo + * @param pageSize + * @return + */ + public Map queryResourceListPaging(User loginUser, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) { + + HashMap result = new HashMap<>(5); + Page page = new Page(pageNo, pageSize); + int userId = loginUser.getId(); + if (isAdmin(loginUser)) { + userId= 0; + } + IPage resourceIPage = resourcesMapper.queryResourcePaging(page, + userId, type.ordinal(), searchVal); + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + pageInfo.setTotalCount((int)resourceIPage.getTotal()); + pageInfo.setLists(resourceIPage.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result,Status.SUCCESS); + return result; + } + + /** + * upload file to hdfs + * + * @param loginUser + * @param name + * @param file + */ + private boolean upload(User loginUser, String name, MultipartFile file, ResourceType type) { + // save to local + String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); + String nameSuffix = FileUtils.suffix(name); + + // determine file suffix + if (!StringUtils.equals(fileSuffix, nameSuffix)) { + return false; + } + // query tenant + String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); + // random file name + String localFilename = 
FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); + + + // save file to hdfs, and delete original file + String hdfsFilename = ""; + String resourcePath = ""; + if (type.equals(ResourceType.FILE)) { + hdfsFilename = HadoopUtils.getHdfsFilename(tenantCode, name); + resourcePath = HadoopUtils.getHdfsResDir(tenantCode); + } else if (type.equals(ResourceType.UDF)) { + hdfsFilename = HadoopUtils.getHdfsUdfFilename(tenantCode, name); + resourcePath = HadoopUtils.getHdfsUdfDir(tenantCode); + } + try { + // if tenant dir not exists + if (!HadoopUtils.getInstance().exists(resourcePath)) { + createTenantDirIfNotExists(tenantCode); + } + org.apache.dolphinscheduler.api.utils.FileUtils.copyFile(file, localFilename); + HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true); + } catch (Exception e) { + logger.error(e.getMessage(), e); + return false; + } + return true; + } + + /** + * query resource list + * + * @param loginUser + * @param type + * @return + */ + public Map queryResourceList(User loginUser, ResourceType type) { + + Map result = new HashMap<>(5); + List resourceList; + int userId = loginUser.getId(); + if(isAdmin(loginUser)){ + userId = 0; + } + resourceList = resourcesMapper.queryResourceList(null, userId, type.ordinal()); + result.put(Constants.DATA_LIST, resourceList); + putMsg(result,Status.SUCCESS); + + return result; + } + + /** + * delete resource + * + * @param loginUser + * @param resourceId + */ + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) + public Result delete(User loginUser, int resourceId) throws Exception { + Result result = new Result(); + + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()){ + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + + //get resource and hdfs path + Resource resource = 
resourcesMapper.selectById(resourceId); + if (resource == null) { + logger.error("resource file not exist, resource id {}", resourceId); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + if (loginUser.getId() != resource.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); + String hdfsFilename = ""; + + // delete hdfs file by type + hdfsFilename = getHdfsFileName(resource, tenantCode, hdfsFilename); + + //delete data in database + resourcesMapper.deleteById(resourceId); + resourceUserMapper.deleteResourceUser(0, resourceId); + //delete file on hdfs + HadoopUtils.getInstance().delete(hdfsFilename, false); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * verify resource by name and type + * @param name + * @param type + * @param loginUser + * @return + */ + public Result verifyResourceName(String name, ResourceType type,User loginUser) { + Result result = new Result(); + putMsg(result, Status.SUCCESS); + if (checkResourceExists(name, 0, type.ordinal())) { + logger.error("resource type:{} name:{} has exist, can't create again.", type, name); + putMsg(result, Status.RESOURCE_EXIST); + } else { + // query tenant + Tenant tenant = tenantMapper.queryById(loginUser.getTenantId()); + if(tenant != null){ + String tenantCode = tenant.getTenantCode(); + + try { + String hdfsFilename = getHdfsFileName(type,tenantCode,name); + if(HadoopUtils.getInstance().exists(hdfsFilename)){ + logger.error("resource type:{} name:{} has exist in hdfs {}, can't create again.", type, name,hdfsFilename); + putMsg(result, Status.RESOURCE_FILE_EXIST,hdfsFilename); + } + + } catch (Exception e) { + logger.error(e.getMessage(),e); + putMsg(result,Status.HDFS_OPERATION_ERROR); + } + }else{ + putMsg(result,Status.TENANT_NOT_EXIST); + } + } + + + return result; + } + + /** + * verify resource by 
name and type + * + * @param name + * @return + */ + public Result verifyResourceName(String name, ResourceType type) { + Result result = new Result(); + if (checkResourceExists(name, 0, type.ordinal())) { + logger.error("resource type:{} name:{} has exist, can't create again.", type, name); + putMsg(result, Status.RESOURCE_EXIST); + } else { + putMsg(result, Status.SUCCESS); + } + + return result; + } + + /** + * view resource file online + * + * @param resourceId + * @return + */ + public Result readResource(int resourceId, int skipLineNum, int limit) { + Result result = new Result(); + + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()){ + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + + // get resource by id + Resource resource = resourcesMapper.selectById(resourceId); + if (resource == null) { + logger.error("resouce file not exist, resource id {}", resourceId); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + //check preview or not by file suffix + String nameSuffix = FileUtils.suffix(resource.getAlias()); + String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); + if (StringUtils.isNotEmpty(resourceViewSuffixs)) { + List strList = Arrays.asList(resourceViewSuffixs.split(",")); + if (!strList.contains(nameSuffix)) { + logger.error("resouce suffix {} not support view, resource id {}", nameSuffix, resourceId); + putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); + return result; + } + } + + User user = userMapper.queryDetailsById(resource.getUserId()); + String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); + // hdfs path + String hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias()); + logger.info("resource hdfs path is {} ", hdfsFileName); + try { + if(HadoopUtils.getInstance().exists(hdfsFileName)){ + List content = 
HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit); + + putMsg(result, Status.SUCCESS); + Map map = new HashMap<>(); + map.put(ALIAS, resource.getAlias()); + map.put(CONTENT, StringUtils.join(content.toArray(), "\n")); + result.setData(map); + }else{ + logger.error("read file {} not exist in hdfs", hdfsFileName); + putMsg(result, Status.RESOURCE_FILE_NOT_EXIST,hdfsFileName); + } + + } catch (Exception e) { + logger.error(String.format("Resource %s read failed", hdfsFileName), e); + putMsg(result, Status.HDFS_OPERATION_ERROR); + } + + return result; + } + + /** + * create resource file online + * + * @param loginUser + * @param type + * @param fileName + * @param fileSuffix + * @param desc + * @param content + * @return + */ + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) + public Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content) { + Result result = new Result(); + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()){ + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + + //check file suffix + String nameSuffix = fileSuffix.trim(); + String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); + if (StringUtils.isNotEmpty(resourceViewSuffixs)) { + List strList = Arrays.asList(resourceViewSuffixs.split(",")); + if (!strList.contains(nameSuffix)) { + logger.error("resouce suffix {} not support create", nameSuffix); + putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); + return result; + } + } + + String name = fileName.trim() + "." 
+ nameSuffix; + + result = verifyResourceName(name,type,loginUser); + if (!result.getCode().equals(Status.SUCCESS.getCode())) { + return result; + } + + // save data + Date now = new Date(); + Resource resource = new Resource(name,name,desc,loginUser.getId(),type,content.getBytes().length,now,now); + + resourcesMapper.insert(resource); + + putMsg(result, Status.SUCCESS); + Map dataMap = new BeanMap(resource); + Map resultMap = new HashMap<>(5); + for (Object key : dataMap.keySet()) { + if (!Constants.CLASS.equalsIgnoreCase(key.toString())) { + resultMap.put(key.toString(), dataMap.get(key)); + } + } + result.setData(resultMap); + + String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); + + result = uploadContentToHdfs(name, tenantCode, content); + if (!result.getCode().equals(Status.SUCCESS.getCode())) { + throw new RuntimeException(result.getMsg()); + } + return result; + } + + /** + * updateProcessInstance resource + * + * @param resourceId + * @return + */ + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) + public Result updateResourceContent(int resourceId, String content) { + Result result = new Result(); + + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()){ + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + + Resource resource = resourcesMapper.selectById(resourceId); + if (resource == null) { + logger.error("read file not exist, resource id {}", resourceId); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + //check can edit by file suffix + String nameSuffix = FileUtils.suffix(resource.getAlias()); + String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); + if (StringUtils.isNotEmpty(resourceViewSuffixs)) { + List strList = Arrays.asList(resourceViewSuffixs.split(",")); + if (!strList.contains(nameSuffix)) { + logger.error("resouce suffix 
{} not support updateProcessInstance, resource id {}", nameSuffix, resourceId); + putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); + return result; + } + } + + resource.setSize(content.getBytes().length); + resource.setUpdateTime(new Date()); + resourcesMapper.updateById(resource); + + User user = userMapper.queryDetailsById(resource.getUserId()); + String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); + + result = uploadContentToHdfs(resource.getAlias(), tenantCode, content); + if (!result.getCode().equals(Status.SUCCESS.getCode())) { + throw new RuntimeException(result.getMsg()); + } + return result; + } + + /** + * @param resourceName + * @param tenantCode + * @param content + * @return + */ + private Result uploadContentToHdfs(String resourceName, String tenantCode, String content) { + Result result = new Result(); + String localFilename = ""; + String hdfsFileName = ""; + try { + localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); + + if (!FileUtils.writeContent2File(content, localFilename)) { + // write file fail + logger.error("file {} fail, content is {}", localFilename, content); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + + // get file hdfs path + hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resourceName); + String resourcePath = HadoopUtils.getHdfsResDir(tenantCode); + logger.info("resource hdfs path is {} ", hdfsFileName); + + HadoopUtils hadoopUtils = HadoopUtils.getInstance(); + if (!hadoopUtils.exists(resourcePath)) { + // create if tenant dir not exists + createTenantDirIfNotExists(tenantCode); + } + if (hadoopUtils.exists(hdfsFileName)) { + hadoopUtils.delete(hdfsFileName, false); + } + + hadoopUtils.copyLocalToHdfs(localFilename, hdfsFileName, true, true); + } catch (Exception e) { + logger.error(e.getMessage(), e); + result.setCode(Status.HDFS_OPERATION_ERROR.getCode()); + result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, 
hdfsFileName)); + return result; + } + putMsg(result, Status.SUCCESS); + return result; + } + + + /** + * download file + * + * @param resourceId + * @return + */ + public org.springframework.core.io.Resource downloadResource(int resourceId) throws Exception { + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()){ + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + throw new RuntimeException("hdfs not startup"); + } + + Resource resource = resourcesMapper.selectById(resourceId); + if (resource == null) { + logger.error("download file not exist, resource id {}", resourceId); + return null; + } + User user = userMapper.queryDetailsById(resource.getUserId()); + String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); + + String hdfsFileName = ""; + hdfsFileName = getHdfsFileName(resource, tenantCode, hdfsFileName); + + String localFileName = FileUtils.getDownloadFilename(resource.getAlias()); + logger.info("resource hdfs path is {} ", hdfsFileName); + + HadoopUtils.getInstance().copyHdfsToLocal(hdfsFileName, localFileName, false, true); + org.springframework.core.io.Resource file = org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName); + return file; + } + + + /** + * unauthorized file + * + * @param loginUser + * @param userId + * @return + */ + public Map unauthorizedFile(User loginUser, Integer userId) { + + Map result = new HashMap<>(); + if (checkAdmin(loginUser, result)) { + return result; + } + List resourceList = resourcesMapper.queryResourceExceptUserId(userId); + List list ; + if (resourceList != null && resourceList.size() > 0) { + Set resourceSet = new HashSet<>(resourceList); + List authedResourceList = resourcesMapper.queryAuthorizedResourceList(userId); + + getAuthorizedResourceList(resourceSet, authedResourceList); + list = new ArrayList<>(resourceSet); + }else { + list = new ArrayList<>(0); + } + + result.put(Constants.DATA_LIST, 
list); + putMsg(result,Status.SUCCESS); + return result; + } + + + + + /** + * unauthorized udf function + * + * @param loginUser + * @param userId + * @return + */ + public Map unauthorizedUDFFunction(User loginUser, Integer userId) { + Map result = new HashMap<>(5); + //only admin can operate + if (checkAdmin(loginUser, result)) { + return result; + } + + List udfFuncList = udfFunctionMapper.queryUdfFuncExceptUserId(userId); + List resultList = new ArrayList<>(); + Set udfFuncSet = null; + if (udfFuncList != null && udfFuncList.size() > 0) { + udfFuncSet = new HashSet<>(udfFuncList); + + List authedUDFFuncList = udfFunctionMapper.queryAuthedUdfFunc(userId); + + getAuthorizedResourceList(udfFuncSet, authedUDFFuncList); + resultList = new ArrayList<>(udfFuncSet); + } + result.put(Constants.DATA_LIST, resultList); + putMsg(result,Status.SUCCESS); + return result; + } + + + + + /** + * authorized udf function + * + * @param loginUser + * @param userId + * @return + */ + public Map authorizedUDFFunction(User loginUser, Integer userId) { + Map result = new HashMap<>(); + if (checkAdmin(loginUser, result)) { + return result; + } + List udfFuncs = udfFunctionMapper.queryAuthedUdfFunc(userId); + result.put(Constants.DATA_LIST, udfFuncs); + putMsg(result,Status.SUCCESS); + return result; + } + + + /** + * authorized file + * + * @param loginUser + * @param userId + * @return + */ + public Map authorizedFile(User loginUser, Integer userId) { + Map result = new HashMap<>(5); + if (checkAdmin(loginUser, result)){ + return result; + } + List authedResources = resourcesMapper.queryAuthorizedResourceList(userId); + + result.put(Constants.DATA_LIST, authedResources); + putMsg(result,Status.SUCCESS); + return result; + } + + /** + * get hdfs file name + * + * @param resource + * @param tenantCode + * @param hdfsFileName + * @return + */ + private String getHdfsFileName(Resource resource, String tenantCode, String hdfsFileName) { + if (resource.getType().equals(ResourceType.FILE)) 
{ + hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias()); + } else if (resource.getType().equals(ResourceType.UDF)) { + hdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, resource.getAlias()); + } + return hdfsFileName; + } + + /** + * get hdfs file name + * + * @param resourceType + * @param tenantCode + * @param hdfsFileName + * @return + */ + private String getHdfsFileName(ResourceType resourceType, String tenantCode, String hdfsFileName) { + if (resourceType.equals(ResourceType.FILE)) { + hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, hdfsFileName); + } else if (resourceType.equals(ResourceType.UDF)) { + hdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, hdfsFileName); + } + return hdfsFileName; + } + + /** + * get authorized resource list + * + * @param resourceSet + * @param authedResourceList + */ + private void getAuthorizedResourceList(Set resourceSet, List authedResourceList) { + Set authedResourceSet = null; + if (authedResourceList != null && authedResourceList.size() > 0) { + authedResourceSet = new HashSet<>(authedResourceList); + resourceSet.removeAll(authedResourceSet); + + } + } + +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java new file mode 100644 index 0000000000..aa64ad76b8 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java @@ -0,0 +1,594 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + + +import org.apache.dolphinscheduler.api.dto.ScheduleParam; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.WarningType; +import org.apache.dolphinscheduler.common.model.MasterServer; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; +import org.apache.dolphinscheduler.dao.utils.cron.CronUtils; +import org.apache.dolphinscheduler.server.quartz.ProcessScheduleJob; +import org.apache.dolphinscheduler.server.quartz.QuartzExecutors; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.commons.lang3.StringUtils; +import 
org.quartz.CronExpression; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.io.IOException; +import java.text.ParseException; +import java.util.*; + +/** + * scheduler service + */ +@Service +public class SchedulerService extends BaseService { + + private static final Logger logger = LoggerFactory.getLogger(SchedulerService.class); + + @Autowired + private ProjectService projectService; + + @Autowired + private ExecutorService executorService; + + @Autowired + private MonitorService monitorService; + + @Autowired + private ProcessDao processDao; + + @Autowired + private ScheduleMapper scheduleMapper; + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + /** + * save schedule + * + * @param loginUser + * @param projectName + * @param processDefineId + * @param schedule + * @param warningType + * @param warningGroupId + * @param failureStrategy + * @return + */ + @Transactional(value = "TransactionManager", rollbackFor = Exception.class) + public Map insertSchedule(User loginUser, String projectName, Integer processDefineId, String schedule, WarningType warningType, + int warningGroupId, FailureStrategy failureStrategy, + String receivers, String receiversCc, Priority processInstancePriority, int workerGroupId) throws IOException { + + Map result = new HashMap(5); + + Project project = projectMapper.queryByName(projectName); + + // check project auth + Map checkResult = checkAuth(loginUser, projectName, project); + if (checkResult != null) { + return checkResult; + } + + // check work flow define release state + ProcessDefinition processDefinition = processDao.findProcessDefineById(processDefineId); + result = executorService.checkProcessDefinitionValid(processDefinition, processDefineId); + 
if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + + Schedule scheduleObj = new Schedule(); + Date now = new Date(); + + scheduleObj.setProjectName(projectName); + scheduleObj.setProcessDefinitionId(processDefinition.getId()); + scheduleObj.setProcessDefinitionName(processDefinition.getName()); + + ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); + if (DateUtils.differSec(scheduleParam.getStartTime(),scheduleParam.getEndTime()) == 0) { + logger.warn("The start time must not be the same as the end"); + putMsg(result,Status.SCHEDULE_START_TIME_END_TIME_SAME); + return result; + } + scheduleObj.setStartTime(scheduleParam.getStartTime()); + scheduleObj.setEndTime(scheduleParam.getEndTime()); + if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { + logger.error(scheduleParam.getCrontab() + " verify failure"); + + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab()); + return result; + } + scheduleObj.setCrontab(scheduleParam.getCrontab()); + scheduleObj.setWarningType(warningType); + scheduleObj.setWarningGroupId(warningGroupId); + scheduleObj.setFailureStrategy(failureStrategy); + scheduleObj.setCreateTime(now); + scheduleObj.setUpdateTime(now); + scheduleObj.setUserId(loginUser.getId()); + scheduleObj.setUserName(loginUser.getUserName()); + scheduleObj.setReleaseState(ReleaseState.OFFLINE); + scheduleObj.setProcessInstancePriority(processInstancePriority); + scheduleObj.setWorkerGroupId(workerGroupId); + scheduleMapper.insert(scheduleObj); + + /** + * updateProcessInstance receivers and cc by process definition id + */ + processDefinition.setReceivers(receivers); + processDefinition.setReceiversCc(receiversCc); + processDefinitionMapper.updateById(processDefinition); + putMsg(result, Status.SUCCESS); + + return result; + } + + + /** + * updateProcessInstance schedule + * + * @param loginUser + * @param projectName + * @param id + * @param 
scheduleExpression + * @param warningType + * @param warningGroupId + * @param failureStrategy + * @param scheduleStatus + * @param workerGroupId + * @return + */ + @Transactional(value = "TransactionManager", rollbackFor = Exception.class) + public Map updateSchedule(User loginUser, String projectName, Integer id, String scheduleExpression, WarningType warningType, + int warningGroupId, FailureStrategy failureStrategy, + String receivers, String receiversCc, ReleaseState scheduleStatus, + Priority processInstancePriority, int workerGroupId) throws IOException { + Map result = new HashMap(5); + + Project project = projectMapper.queryByName(projectName); + + // check project auth + Map checkResult = checkAuth(loginUser, projectName, project); + if (checkResult != null) { + return checkResult; + } + + // check schedule exists + Schedule schedule = scheduleMapper.selectById(id); + + if (schedule == null) { + putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); + return result; + } + + ProcessDefinition processDefinition = processDao.findProcessDefineById(schedule.getProcessDefinitionId()); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId()); + return result; + } + + /** + * scheduling on-line status forbid modification + */ + if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) { + return result; + } + + Date now = new Date(); + + // updateProcessInstance param + if (StringUtils.isNotEmpty(scheduleExpression)) { + ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); + if (DateUtils.differSec(scheduleParam.getStartTime(),scheduleParam.getEndTime()) == 0) { + logger.warn("The start time must not be the same as the end"); + putMsg(result,Status.SCHEDULE_START_TIME_END_TIME_SAME); + return result; + } + schedule.setStartTime(scheduleParam.getStartTime()); + 
schedule.setEndTime(scheduleParam.getEndTime()); + if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { + putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab()); + return result; + } + schedule.setCrontab(scheduleParam.getCrontab()); + } + + if (warningType != null) { + schedule.setWarningType(warningType); + } + + schedule.setWarningGroupId(warningGroupId); + + if (failureStrategy != null) { + schedule.setFailureStrategy(failureStrategy); + } + + if (scheduleStatus != null) { + schedule.setReleaseState(scheduleStatus); + } + schedule.setWorkerGroupId(workerGroupId); + schedule.setUpdateTime(now); + schedule.setProcessInstancePriority(processInstancePriority); + scheduleMapper.updateById(schedule); + + /** + * updateProcessInstance recipients and cc by process definition ID + */ + processDefinition.setReceivers(receivers); + processDefinition.setReceiversCc(receiversCc); + processDefinitionMapper.updateById(processDefinition); + + putMsg(result, Status.SUCCESS); + return result; + } + + + /** + * set schedule online or offline + * + * @param loginUser + * @param projectName + * @param id + * @param scheduleStatus + * @return + */ + @Transactional(value = "TransactionManager", rollbackFor = Exception.class) + public Map setScheduleState(User loginUser, String projectName, Integer id, ReleaseState scheduleStatus) { + + Map result = new HashMap(5); + + Project project = projectMapper.queryByName(projectName); + Map checkResult = checkAuth(loginUser, projectName, project); + if (checkResult != null) { + return checkResult; + } + + // check schedule exists + Schedule scheduleObj = scheduleMapper.selectById(id); + + if (scheduleObj == null) { + putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); + return result; + } + // check schedule release state + if(scheduleObj.getReleaseState() == scheduleStatus){ + logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}", + 
scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus); + putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus); + return result; + } + ProcessDefinition processDefinition = processDao.findProcessDefineById(scheduleObj.getProcessDefinitionId()); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId()); + return result; + } + + if(scheduleStatus == ReleaseState.ONLINE){ + // check process definition release state + if(processDefinition.getReleaseState() != ReleaseState.ONLINE){ + logger.info("not release process definition id: {} , name : {}", + processDefinition.getId(), processDefinition.getName()); + putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, scheduleObj.getProcessDefinitionId()); + return result; + } + // check sub process definition release state + List subProcessDefineIds = new ArrayList<>(); + processDao.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); + Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]); + if (subProcessDefineIds.size() > 0){ + List subProcessDefinitionList = + processDefinitionMapper.queryDefinitionListByIdList(idArray); + if (subProcessDefinitionList != null && subProcessDefinitionList.size() > 0){ + for (ProcessDefinition subProcessDefinition : subProcessDefinitionList){ + /** + * if there is no online process, exit directly + */ + if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE){ + logger.info("not release process definition id: {} , name : {}", + subProcessDefinition.getId(), subProcessDefinition.getName()); + putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId()); + return result; + } + } + } + } + } + + // check master server exists + List masterServers = monitorService.getServerListFromZK(true); + + + if (masterServers.size() == 0) { + putMsg(result, Status.MASTER_NOT_EXISTS); + } 
+ + // set status + scheduleObj.setReleaseState(scheduleStatus); + + scheduleMapper.updateById(scheduleObj); + + try { + switch (scheduleStatus) { + case ONLINE: { + logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}, port: {}", project.getId(), processDefinition.getId(), masterServers); + setSchedule(project.getId(), id); + break; + } + case OFFLINE: { + logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}, port: {}", project.getId(), processDefinition.getId(), masterServers); + deleteSchedule(project.getId(), id); + break; + } + default: { + putMsg(result, Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString()); + return result; + } + } + } catch (Exception e) { + result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? "set online failure" : "set offline failure"); + throw new RuntimeException(result.get(Constants.MSG).toString()); + } + + putMsg(result, Status.SUCCESS); + return result; + } + + + + /** + * query schedule + * + * @param loginUser + * @param projectName + * @param processDefineId + * @return + */ + public Map querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize) { + + HashMap result = new HashMap<>(); + + Project project = projectMapper.queryByName(projectName); + + // check project auth + Map checkResult = checkAuth(loginUser, projectName, project); + if (checkResult != null) { + return checkResult; + } + + ProcessDefinition processDefinition = processDao.findProcessDefineById(processDefineId); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); + return result; + } + Page page = new Page(pageNo, pageSize); + IPage scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging( + page, processDefineId, searchVal + ); + + + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + pageInfo.setTotalCount((int)scheduleIPage.getTotal()); + 
pageInfo.setLists(scheduleIPage.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * query schedule list + * + * @param loginUser + * @param projectName + * @return + */ + public Map queryScheduleList(User loginUser, String projectName) { + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + // check project auth + Map checkResult = checkAuth(loginUser, projectName, project); + if (checkResult != null) { + return checkResult; + } + + List schedules = scheduleMapper.querySchedulerListByProjectName(projectName); + + result.put(Constants.DATA_LIST, schedules); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * set schedule + * + * @see + */ + public void setSchedule(int projectId, int scheduleId) throws RuntimeException{ + logger.info("set schedule, project id: {}, scheduleId: {}", projectId, scheduleId); + + + Schedule schedule = processDao.querySchedule(scheduleId); + if (schedule == null) { + logger.warn("process schedule info not exists"); + } + + Date startDate = schedule.getStartTime(); + Date endDate = schedule.getEndTime(); + + String jobName = QuartzExecutors.buildJobName(scheduleId); + String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); + + Map dataMap = QuartzExecutors.buildDataMap(projectId, scheduleId, schedule); + + QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, jobName, jobGroupName, startDate, endDate, + schedule.getCrontab(), dataMap); + + } + + /** + * delete schedule + */ + public static void deleteSchedule(int projectId, int scheduleId) throws RuntimeException{ + logger.info("delete schedules of project id:{}, schedule id:{}", projectId, scheduleId); + + String jobName = QuartzExecutors.buildJobName(scheduleId); + String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); + + if(!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)){ + logger.warn("set offline 
failure:projectId:{},scheduleId:{}",projectId,scheduleId); + throw new RuntimeException(String.format("set offline failure")); + } + + } + + /** + * check valid + * + * @param result + * @param bool + * @param status + * @return + */ + private boolean checkValid(Map result, boolean bool, Status status) { + // timeout is valid + if (bool) { + putMsg(result, status); + return true; + } + return false; + } + + /** + * + * @param loginUser + * @param projectName + * @param project + * @return + */ + private Map checkAuth(User loginUser, String projectName, Project project) { + // check project auth + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + return null; + } + + /** + * delete schedule by id + * + * @param loginUser + * @param projectName + * @param scheduleId + * @return + */ + public Map deleteScheduleById(User loginUser, String projectName, Integer scheduleId) { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + Schedule schedule = scheduleMapper.selectById(scheduleId); + + if (schedule == null) { + putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId); + return result; + } + + // Determine if the login user is the owner of the schedule + if (loginUser.getId() != schedule.getUserId()) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + // check schedule is already online + if(schedule.getReleaseState() == ReleaseState.ONLINE){ + putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE,schedule.getId()); + return result; + } + + + int delete = scheduleMapper.deleteById(scheduleId); + + if (delete > 0) { + 
putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR); + } + return result; + } + + /** + * preview schedule + * @param loginUser + * @param projectName + * @param schedule + * @return + */ + public Map previewSchedule(User loginUser, String projectName, String schedule) { + Map result = new HashMap<>(5); + CronExpression cronExpression; + ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); + Date now = new Date(); + + Date startTime = now.after(scheduleParam.getStartTime()) ? now : scheduleParam.getStartTime(); + Date endTime = scheduleParam.getEndTime(); + try { + cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab()); + } catch (ParseException e) { + logger.error(e.getMessage(),e); + putMsg(result,Status.PARSE_TO_CRON_EXPRESSION_ERROR); + return result; + } + List selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime,cronExpression); + result.put(Constants.DATA_LIST, selfFireDateList.stream().map(t -> DateUtils.dateToString(t)).limit(org.apache.dolphinscheduler.common.Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT)); + putMsg(result, Status.SUCCESS); + return result; + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java new file mode 100644 index 0000000000..391aa0fbe5 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + + +import org.apache.dolphinscheduler.api.controller.BaseController; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.dao.entity.Session; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.SessionMapper; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import java.util.Date; +import java.util.List; +import java.util.UUID; + +/** + * session service + */ +@Service +public class SessionService extends BaseService{ + + private static final Logger logger = LoggerFactory.getLogger(SessionService.class); + + @Autowired + private SessionMapper sessionMapper; + + /** + * get user session from request + * + * @param request + * @return + */ + public Session getSession(HttpServletRequest request) { + String sessionId = request.getHeader(Constants.SESSION_ID); + + if(StringUtils.isBlank(sessionId)) { + Cookie cookie = getCookie(request, Constants.SESSION_ID); + + if (cookie != null) { + sessionId = cookie.getValue(); + } + } + + 
if(StringUtils.isBlank(sessionId)) { + return null; + } + + String ip = BaseController.getClientIpAddress(request); + logger.debug("get session: {}, ip: {}", sessionId, ip); + + return sessionMapper.selectById(sessionId); + } + + /** + * create session + * + * @param user + * @param ip + * @return + */ + public String createSession(User user, String ip) { + Session session = null; + + // logined + List sessionList = sessionMapper.queryByUserId(user.getId()); + + Date now = new Date(); + + /** + * if you have logged in and are still valid, return directly + */ + if (CollectionUtils.isNotEmpty(sessionList)) { + // is session list greater 1 , delete other ,get one + if (sessionList.size() > 1){ + for (int i=1 ; i < sessionList.size();i++){ + sessionMapper.deleteById(sessionList.get(i).getId()); + } + } + session = sessionList.get(0); + if (now.getTime() - session.getLastLoginTime().getTime() <= Constants.SESSION_TIME_OUT * 1000) { + /** + * updateProcessInstance the latest login time + */ + session.setLastLoginTime(now); + sessionMapper.updateById(session); + + return session.getId(); + + } else { + /** + * session expired, then delete this session first + */ + sessionMapper.deleteById(session.getId()); + } + } + + // assign new session + session = new Session(); + + session.setId(UUID.randomUUID().toString()); + session.setIp(ip); + session.setUserId(user.getId()); + session.setLastLoginTime(now); + + sessionMapper.insert(session); + + return session.getId(); + } + + /** + * sign out + * remove ip restrictions + * + * @param ip no use + * @param loginUser + */ + public void signOut(String ip, User loginUser) { + /** + * query session by user id and ip + */ + List sessionList = sessionMapper.queryByUserId(loginUser.getId()); + + for (Session session : sessionList){ + //delete session + sessionMapper.deleteById(session.getId()); + } + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java new file mode 100644 index 0000000000..6146eb6b58 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java @@ -0,0 +1,134 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.text.MessageFormat; +import java.util.*; + +/** + * task instance service + */ +@Service +public class TaskInstanceService extends BaseService { + + private static final Logger logger = LoggerFactory.getLogger(TaskInstanceService.class); + + @Autowired + ProjectMapper projectMapper; + + @Autowired + ProjectService projectService; + + @Autowired + ProcessDao processDao; + + @Autowired + TaskInstanceMapper taskInstanceMapper; + + + /** + * query task list by project, process instance, task name, task start time, task end time, task status, keyword paging + * + * @param loginUser + * @param projectName + * @param processInstanceId + * @param taskName + * @param startDate + * @param endDate + * @param searchVal + * @param stateType + * @param pageNo + * @param pageSize + * @return + */ + public Map 
queryTaskListPaging(User loginUser, String projectName, + Integer processInstanceId, String taskName, String startDate, String endDate, + String searchVal, ExecutionStatus stateType,String host, + Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status status = (Status) checkResult.get(Constants.STATUS); + if (status != Status.SUCCESS) { + return checkResult; + } + + int[] statusArray = null; + String statesStr = null; + // filter by status + if(stateType != null){ + statusArray = new int[]{stateType.ordinal()}; + } + if(statusArray != null){ + statesStr = Arrays.toString(statusArray).replace("[", "").replace("]",""); + } + + Date start = null; + Date end = null; + try { + if(StringUtils.isNotEmpty(startDate)){ + start = DateUtils.getScheduleDate(startDate); + } + if(StringUtils.isNotEmpty( endDate)){ + end = DateUtils.getScheduleDate(endDate); + } + } catch (Exception e) { + result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); + result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate")); + return result; + } + + Page page = new Page(pageNo, pageSize); + IPage taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging( + page, project.getId(), processInstanceId, searchVal, taskName, statesStr, host, start, end + ); + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + Set exclusionSet = new HashSet(){{ + add(Constants.CLASS); + add("taskJson"); + }}; + pageInfo.setTotalCount((int)taskInstanceIPage.getTotal()); + pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(),exclusionSet)); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } +} diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java new file mode 100644 index 0000000000..61a98f8fd6 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.dao.TaskRecordDao; +import org.apache.dolphinscheduler.dao.entity.TaskRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * task record service + */ +@Service +public class TaskRecordService extends BaseService{ + + private static final Logger logger = LoggerFactory.getLogger(TaskRecordService.class); + + /** + * query task record list paging + * + * @param taskName + * @param startDate + * @param taskDate + * @param sourceTable + * @param destTable + * @param endDate + * @param state + * @param pageNo + * @param pageSize + * @return + */ + public Map queryTaskRecordListPaging(boolean isHistory, String taskName, String startDate, + String taskDate, String sourceTable, + String destTable, String endDate, + String state, Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(10); + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + + Map map = new HashMap<>(10); + map.put("taskName", taskName); + map.put("taskDate", taskDate); + map.put("state", state); + map.put("sourceTable", sourceTable); + map.put("targetTable", destTable); + map.put("startTime", startDate); + map.put("endTime", endDate); + map.put("offset", pageInfo.getStart().toString()); + map.put("pageSize", pageInfo.getPageSize().toString()); + + String table = isHistory ? 
TASK_RECORD_TABLE_HISTORY_HIVE_LOG : TASK_RECORD_TABLE_HIVE_LOG; + int count = TaskRecordDao.countTaskRecord(map, table); + List recordList = TaskRecordDao.queryAllTaskRecord(map, table); + pageInfo.setTotalCount(count); + pageInfo.setLists(recordList); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java new file mode 100644 index 0000000000..ab8a5dd3cc --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java @@ -0,0 +1,300 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.TenantMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.fs.FileStatus; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * tenant service + */ +@Service +public class TenantService extends BaseService{ + + private static final Logger logger = LoggerFactory.getLogger(TenantService.class); + + @Autowired + private TenantMapper tenantMapper; + + /** + * create tenant + * + * @param loginUser + * @param tenantCode + * @param tenantName + * @param queueId + * @param desc + * @return + */ + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) + public Map createTenant(User loginUser, + String tenantCode, + String tenantName, + int queueId, + String desc) throws Exception { + + Map result = new HashMap<>(5); + result.put(Constants.STATUS, false); + if (checkAdmin(loginUser, result)) { + return result; + } + + if (!checkTenant(tenantCode)){ + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, tenantCode); + return result; + } + + + Tenant tenant = new Tenant(); 
+ Date now = new Date(); + + if (!tenantCode.matches("^[0-9a-zA-Z_.-]{1,}$") || tenantCode.startsWith("-") || tenantCode.startsWith(".")){ + putMsg(result, Status.VERIFY_TENANT_CODE_ERROR); + return result; + } + tenant.setTenantCode(tenantCode); + tenant.setTenantName(tenantName); + tenant.setQueueId(queueId); + tenant.setDescription(desc); + tenant.setCreateTime(now); + tenant.setUpdateTime(now); + + // save + tenantMapper.insert(tenant); + + // if hdfs startup + if (PropertyUtils.getResUploadStartupState()){ + createTenantDirIfNotExists(tenantCode); + } + + putMsg(result, Status.SUCCESS); + + return result; +} + + + + /** + * query tenant list paging + * + * @param loginUser + * @param searchVal + * @param pageNo + * @param pageSize + * @return + */ + public Map queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { + + Map result = new HashMap<>(5); + if (checkAdmin(loginUser, result)) { + return result; + } + + Page page = new Page(pageNo, pageSize); + IPage tenantIPage = tenantMapper.queryTenantPaging(page, searchVal); + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + pageInfo.setTotalCount((int)tenantIPage.getTotal()); + pageInfo.setLists(tenantIPage.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * updateProcessInstance tenant + * + * @param loginUser + * @param tenantCode + * @param tenantName + * @param queueId + * @param desc + * @return + */ + public Map updateTenant(User loginUser,int id,String tenantCode, String tenantName, int queueId, String desc) throws Exception { + + Map result = new HashMap<>(5); + result.put(Constants.STATUS, false); + + if (checkAdmin(loginUser, result)) { + return result; + } + + Tenant tenant = tenantMapper.queryById(id); + + if (tenant == null){ + putMsg(result, Status.TENANT_NOT_EXIST); + return result; + } + + // updateProcessInstance tenant + /** + * if the tenant code is modified, the original resource 
needs to be copied to the new tenant. + */ + if (!tenant.getTenantCode().equals(tenantCode)){ + if (checkTenant(tenantCode)){ + // if hdfs startup + if (PropertyUtils.getResUploadStartupState()){ + String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources"; + String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); + //init hdfs resource + HadoopUtils.getInstance().mkdir(resourcePath); + HadoopUtils.getInstance().mkdir(udfsPath); + } + }else { + putMsg(result, Status.TENANT_CODE_HAS_ALREADY_EXISTS); + return result; + } + } + + Date now = new Date(); + + if (StringUtils.isNotEmpty(tenantCode)){ + tenant.setTenantCode(tenantCode); + } + + if (StringUtils.isNotEmpty(tenantName)){ + tenant.setTenantName(tenantName); + } + + if (queueId != 0){ + tenant.setQueueId(queueId); + } + tenant.setDescription(desc); + tenant.setUpdateTime(now); + tenantMapper.updateById(tenant); + + result.put(Constants.STATUS, Status.SUCCESS); + result.put(Constants.MSG, Status.SUCCESS.getMsg()); + return result; + } + + /** + * delete tenant + * + * @param loginUser + * @param id + * @return + */ + @Transactional(value = "TransactionManager", rollbackFor = Exception.class) + public Map deleteTenantById(User loginUser, int id) throws Exception { + Map result = new HashMap<>(5); + + if (checkAdmin(loginUser, result)) { + return result; + } + + Tenant tenant = tenantMapper.queryById(id); + + if (tenant == null){ + putMsg(result, Status.TENANT_NOT_EXIST); + return result; + } + + // if resource upload startup + if (PropertyUtils.getResUploadStartupState()){ + String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode(); + + if (HadoopUtils.getInstance().exists(tenantPath)){ + String resourcePath = HadoopUtils.getHdfsResDir(tenant.getTenantCode()); + FileStatus[] fileStatus = HadoopUtils.getInstance().listFileStatus(resourcePath); + if (fileStatus.length > 0) { + putMsg(result, Status.HDFS_TERANT_RESOURCES_FILE_EXISTS); + return result; + } + 
fileStatus = HadoopUtils.getInstance().listFileStatus(HadoopUtils.getHdfsUdfDir(tenant.getTenantCode())); + if (fileStatus.length > 0) { + putMsg(result, Status.HDFS_TERANT_UDFS_FILE_EXISTS); + return result; + } + + HadoopUtils.getInstance().delete(tenantPath, true); + } + } + + tenantMapper.deleteById(id); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * query tenant list + * + * @param loginUser + * @return + */ + public Map queryTenantList(User loginUser) { + + Map result = new HashMap<>(5); + + List resourceList = tenantMapper.selectList(null); + result.put(Constants.DATA_LIST, resourceList); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * verify tenant code + * + * @param tenantCode + * @return + */ + public Result verifyTenantCode(String tenantCode) { + Result result=new Result(); + if (checkTenant(tenantCode)) { + logger.error("tenant {} has exist, can't create again.", tenantCode); + putMsg(result, Status.TENANT_NAME_EXIST); + }else{ + putMsg(result, Status.SUCCESS); + } + return result; + } + + + /** + * check tenant exists + * + * @param tenantCode + * @return + */ + private boolean checkTenant(String tenantCode) { + return tenantMapper.queryByTenantCode(tenantCode) == null ? true : false; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java new file mode 100644 index 0000000000..102e5b070a --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java @@ -0,0 +1,328 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.UdfType; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.apache.dolphinscheduler.dao.entity.UdfFunc; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; +import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; +import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * udf function service + */ +@Service +public class UdfFuncService extends BaseService{ + + private static final Logger logger = 
LoggerFactory.getLogger(UdfFuncService.class); + + @Autowired + private ResourceMapper resourceMapper; + + @Autowired + private UdfFuncMapper udfFuncMapper; + + @Autowired + private UDFUserMapper udfUserMapper; + + + /** + * create udf function + * + * @param loginUser + * @param funcName + * @param argTypes + * @param database + * @param desc + * @param type + * @param resourceId + * @return + */ + public Result createUdfFunction(User loginUser, + String funcName, + String className, + String argTypes, + String database, + String desc, + UdfType type, + int resourceId) { + Result result = new Result(); + + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()){ + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + + // verify udf func name exist + if (checkUdfFuncNameExists(funcName)) { + logger.error("udf func {} has exist, can't recreate", funcName); + putMsg(result, Status.UDF_FUNCTION_EXISTS); + return result; + } + + Resource resource = resourceMapper.selectById(resourceId); + if (resource == null) { + logger.error("resourceId {} is not exist", resourceId); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + + //save data + UdfFunc udf = new UdfFunc(); + Date now = new Date(); + udf.setUserId(loginUser.getId()); + udf.setFuncName(funcName); + udf.setClassName(className); + if (StringUtils.isNotEmpty(argTypes)) { + udf.setArgTypes(argTypes); + } + if (StringUtils.isNotEmpty(argTypes)) { + udf.setDatabase(database); + } + udf.setDesc(desc); + udf.setResourceId(resourceId); + udf.setResourceName(resource.getAlias()); + udf.setType(type); + + udf.setCreateTime(now); + udf.setUpdateTime(now); + + udfFuncMapper.insert(udf); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * + * @param name + * @return + */ + private boolean checkUdfFuncNameExists(String name){ + List resource = 
udfFuncMapper.queryUdfByIdStr(null, name); + if(resource != null && resource.size() > 0){ + return true; + } + return false; + } + + + /** + * query udf function + */ + public Map queryUdfFuncDetail(int id) { + + Map result = new HashMap<>(5); + UdfFunc udfFunc = udfFuncMapper.selectById(id); + if (udfFunc == null) { + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + result.put(Constants.DATA_LIST, udfFunc); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * updateProcessInstance udf function + * + * @param funcName + * @param argTypes + * @param database + * @param desc + * @param type + * @param resourceId + * @return + */ + public Map updateUdfFunc(int udfFuncId, + String funcName, + String className, + String argTypes, + String database, + String desc, + UdfType type, + int resourceId) { + Map result = new HashMap<>(); + // verify udfFunc is exist + UdfFunc udf = udfFuncMapper.selectById(udfFuncId); + + // if resource upload startup + if (!PropertyUtils.getResUploadStartupState()){ + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + + if (udf == null) { + result.put(Constants.STATUS, Status.UDF_FUNCTION_NOT_EXIST); + result.put(Constants.MSG, Status.UDF_FUNCTION_NOT_EXIST.getMsg()); + return result; + } + + // verify udfFuncName is exist + if (!funcName.equals(udf.getFuncName())) { + if (checkUdfFuncNameExists(funcName)) { + logger.error("UdfFunc {} has exist, can't create again.", funcName); + result.put(Constants.STATUS, Status.UDF_FUNCTION_EXISTS); + result.put(Constants.MSG, Status.UDF_FUNCTION_EXISTS.getMsg()); + return result; + } + } + + Resource resource = resourceMapper.selectById(resourceId); + if (resource == null) { + logger.error("resourceId {} is not exist", resourceId); + result.put(Constants.STATUS, Status.RESOURCE_NOT_EXIST); + result.put(Constants.MSG, Status.RESOURCE_NOT_EXIST.getMsg()); + return 
result; + } + Date now = new Date(); + udf.setFuncName(funcName); + udf.setClassName(className); + if (StringUtils.isNotEmpty(argTypes)) { + udf.setArgTypes(argTypes); + } + if (StringUtils.isNotEmpty(argTypes)) { + udf.setDatabase(database); + } + udf.setDesc(desc); + udf.setResourceId(resourceId); + udf.setResourceName(resource.getAlias()); + udf.setType(type); + + + udf.setCreateTime(now); + udf.setUpdateTime(now); + + udfFuncMapper.updateById(udf); + putMsg(result, Status.SUCCESS); + return result; + } + + + /** + * query udf function list paging + * + * @param loginUser + * @param searchVal + * @param pageNo + * @param pageSize + * @return + */ + public Map queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(5); + + + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + IPage udfFuncList = getUdfFuncsPage(loginUser, searchVal, pageSize, pageNo); + pageInfo.setTotalCount((int)udfFuncList.getTotal()); + pageInfo.setLists(udfFuncList.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * get udf functions + * + * @param loginUser + * @param searchVal + * @param pageSize + * @param pageNo + * @return + */ + private IPage getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) { + + int userId = loginUser.getId(); + if (isAdmin(loginUser)) { + userId = 0; + } + Page page = new Page(pageNo, pageSize); + return udfFuncMapper.queryUdfFuncPaging(page, userId, searchVal); + } + + /** + * query data resource by type + * + * @param loginUser + * @param type + * @return + */ + public Map queryResourceList(User loginUser, Integer type) { + Map result = new HashMap<>(5); + List udfFuncList = udfFuncMapper.getUdfFuncByType(loginUser.getId(), type); + + result.put(Constants.DATA_LIST, udfFuncList); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * delete udf function + * + * @param id + */ + 
@Transactional(value = "TransactionManager", rollbackFor = Exception.class) + public Result delete(int id) { + Result result = new Result(); + + udfFuncMapper.deleteById(id); + udfUserMapper.deleteByUdfFuncId(id); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * verify udf function by name + * + * @param name + * @return + */ + public Result verifyUdfFuncByName(String name) { + Result result = new Result(); + if (checkUdfFuncNameExists(name)) { + logger.error("UDF function name:{} has exist, can't create again.", name); + putMsg(result, Status.UDF_FUNCTION_EXISTS); + } else { + putMsg(result, Status.SUCCESS); + } + + return result; + } + +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java new file mode 100644 index 0000000000..c9f7309e9b --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java @@ -0,0 +1,694 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.dolphinscheduler.api.service;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.Constants;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.*;

/**
 * user service
 */
@Service
public class UsersService extends BaseService {

    private static final Logger logger = LoggerFactory.getLogger(UsersService.class);

    @Autowired
    private UserMapper userMapper;

    @Autowired
    private TenantMapper tenantMapper;

    @Autowired
    private ProjectUserMapper projectUserMapper;

    @Autowired
    private ResourceUserMapper resourcesUserMapper;

    @Autowired
    private ResourceMapper resourceMapper;

    @Autowired
    private DataSourceUserMapper datasourceUserMapper;

    @Autowired
    private UDFUserMapper udfUserMapper;

    @Autowired
    private AlertGroupMapper alertGroupMapper;


    /**
     * create user, only system admin have permission
     *
     * @param loginUser    requesting user, must be admin
     * @param userName     new user name
     * @param userPassword plaintext password, stored as MD5
     * @param email        email address
     * @param tenantId     tenant the user belongs to, must exist
     * @param phone        phone number
     * @param queue        scheduler queue; empty string when not given
     * @return result with SUCCESS, or an error status when a precondition fails
     */
    @Transactional(value = "TransactionManager", rollbackFor = Exception.class)
    public Map<String, Object> createUser(User loginUser,
                                          String userName,
                                          String userPassword,
                                          String email,
                                          int tenantId,
                                          String phone,
                                          String queue) throws Exception {

        Map<String, Object> result = CheckUtils.checkUserParams(userName, userPassword, email, phone);
        if (result.get(Constants.STATUS) != Status.SUCCESS) {
            return result;
        }
        if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) {
            return result;
        }

        if (check(result, checkTenant(tenantId), Status.TENANT_NOT_EXIST, Constants.STATUS)) {
            return result;
        }

        User user = new User();
        Date now = new Date();

        user.setUserName(userName);
        user.setUserPassword(EncryptionUtils.getMd5(userPassword));
        user.setEmail(email);
        user.setTenantId(tenantId);
        user.setPhone(phone);
        // create general users, administrator users are currently built-in
        user.setUserType(UserType.GENERAL_USER);
        user.setCreateTime(now);
        user.setUpdateTime(now);
        if (StringUtils.isEmpty(queue)){
            queue = "";
        }
        user.setQueue(queue);

        // save user
        userMapper.insert(user);

        Tenant tenant = tenantMapper.queryById(tenantId);
        // resource upload startup: make sure both the tenant dir and the user dir exist on HDFS
        if (PropertyUtils.getResUploadStartupState()){
            // if tenant dir not exists, create it first
            if (!HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(tenant.getTenantCode()))){
                createTenantDirIfNotExists(tenant.getTenantCode());
            }
            String userPath = HadoopUtils.getHdfsUserDir(tenant.getTenantCode(), user.getId());
            HadoopUtils.getInstance().mkdir(userPath);
        }

        putMsg(result, Status.SUCCESS);
        return result;

    }

    /**
     * query user by name and plaintext password (hashed before lookup)
     *
     * @param name     user name
     * @param password plaintext password
     * @return matching user or null
     */
    public User queryUser(String name, String password) {
        String md5 = EncryptionUtils.getMd5(password);
        return userMapper.queryUserByNamePassword(name, md5);
    }

    /**
     * check general user or not
     *
     * @param user user to check
     * @return true for a general (non-admin) user
     */
    public boolean isGeneral(User user) {
        return user.getUserType() == UserType.GENERAL_USER;
    }

    /**
     * query user list paging, admin only
     *
     * @param loginUser requesting user, must be admin
     * @param searchVal optional search keyword
     * @param pageNo    page number (1-based)
     * @param pageSize  page size
     * @return result containing a PageInfo under DATA_LIST
     */
    public Map<String, Object> queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
        Map<String, Object> result = new HashMap<>(5);

        if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) {
            return result;
        }

        Page<User> page = new Page<>(pageNo, pageSize);

        IPage<User> userIPage = userMapper.queryUserPaging(page, searchVal);

        PageInfo<User> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotalCount((int) userIPage.getTotal());
        pageInfo.setLists(userIPage.getRecords());
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * update user; when the tenant changes, the user's HDFS resources are
     * migrated from the old tenant directory to the new one
     *
     * @param userId       id of the user to update
     * @param userName     new name; must not collide with another user
     * @param userPassword new plaintext password, ignored when empty
     * @param email        new email, ignored when empty
     * @param tenantId     new tenant id
     * @param phone        new phone number
     * @param queue        new scheduler queue
     * @return result with SUCCESS, or an error status when a precondition fails
     */
    public Map<String, Object> updateUser(int userId,
                                          String userName,
                                          String userPassword,
                                          String email,
                                          int tenantId,
                                          String phone,
                                          String queue) throws Exception {
        Map<String, Object> result = new HashMap<>(5);
        result.put(Constants.STATUS, false);

        User user = userMapper.selectById(userId);

        if (user == null) {
            putMsg(result, Status.USER_NOT_EXIST, userId);
            return result;
        }

        Date now = new Date();

        if (StringUtils.isNotEmpty(userName)) {
            User tempUser = userMapper.queryByUserNameAccurately(userName);
            if (tempUser != null && tempUser.getId() != userId) {
                putMsg(result, Status.USER_NAME_EXIST);
                return result;
            }
            user.setUserName(userName);
        }

        if (StringUtils.isNotEmpty(userPassword)) {
            user.setUserPassword(EncryptionUtils.getMd5(userPassword));
        }

        if (StringUtils.isNotEmpty(email)) {
            user.setEmail(email);
        }
        user.setQueue(queue);
        user.setPhone(phone);
        user.setUpdateTime(now);

        //if user switches the tenant, the user's resources need to be copied to the new tenant
        if (user.getTenantId() != tenantId) {
            Tenant oldTenant = tenantMapper.queryById(user.getTenantId());
            //query tenant
            Tenant newTenant = tenantMapper.queryById(tenantId);
            if (newTenant != null) {
                // if hdfs startup
                if (PropertyUtils.getResUploadStartupState() && oldTenant != null){
                    String newTenantCode = newTenant.getTenantCode();
                    String oldResourcePath = HadoopUtils.getHdfsResDir(oldTenant.getTenantCode());
                    String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode());

                    // if old tenant dir exists, copy resources over and drop the old user dir
                    if (HadoopUtils.getInstance().exists(oldResourcePath)){
                        String newResourcePath = HadoopUtils.getHdfsResDir(newTenantCode);
                        String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode);

                        //file resources list
                        List<Resource> fileResourcesList = resourceMapper.queryResourceList(
                                null, userId, ResourceType.FILE.ordinal());
                        if (CollectionUtils.isNotEmpty(fileResourcesList)) {
                            for (Resource resource : fileResourcesList) {
                                HadoopUtils.getInstance().copy(oldResourcePath + "/" + resource.getAlias(), newResourcePath, false, true);
                            }
                        }

                        //udf resources
                        List<Resource> udfResourceList = resourceMapper.queryResourceList(
                                null, userId, ResourceType.UDF.ordinal());
                        if (CollectionUtils.isNotEmpty(udfResourceList)) {
                            for (Resource resource : udfResourceList) {
                                HadoopUtils.getInstance().copy(oldUdfsPath + "/" + resource.getAlias(), newUdfsPath, false, true);
                            }
                        }

                        //Delete the user from the old tenant directory
                        String oldUserPath = HadoopUtils.getHdfsUserDir(oldTenant.getTenantCode(), userId);
                        HadoopUtils.getInstance().delete(oldUserPath, true);
                    } else {
                        // if old tenant dir not exists , create
                        createTenantDirIfNotExists(oldTenant.getTenantCode());
                    }

                    if (HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(newTenant.getTenantCode()))){
                        //create user in the new tenant directory
                        String newUserPath = HadoopUtils.getHdfsUserDir(newTenant.getTenantCode(), user.getId());
                        HadoopUtils.getInstance().mkdir(newUserPath);
                    } else {
                        // if new tenant dir not exists , create
                        createTenantDirIfNotExists(newTenant.getTenantCode());
                    }

                }
            }
            user.setTenantId(tenantId);
        }

        // update user
        userMapper.updateById(user);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * delete user, admin only; also removes the user's HDFS directory
     *
     * @param loginUser requesting user, must be admin
     * @param id        id of the user to delete
     * @return result with SUCCESS, or USER_NO_OPERATION_PERM for non-admins
     */
    public Map<String, Object> deleteUserById(User loginUser, int id) throws Exception {
        Map<String, Object> result = new HashMap<>(5);
        //only admin can operate
        if (!isAdmin(loginUser)) {
            // BUG FIX: original reported USER_NOT_EXIST here; the actual failure
            // is that the caller lacks permission, not that the user is missing
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        // delete user's HDFS directory first, if resource upload is enabled
        User user = userMapper.queryTenantCodeByUserId(id);

        if (user != null) {
            if (PropertyUtils.getResUploadStartupState()) {
                String userPath = HadoopUtils.getHdfsUserDir(user.getTenantCode(), id);
                if (HadoopUtils.getInstance().exists(userPath)) {
                    HadoopUtils.getInstance().delete(userPath, true);
                }
            }
        }

        userMapper.deleteById(id);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * grant projects to a user, admin only; replaces all existing grants
     *
     * @param loginUser  requesting user, must be admin
     * @param userId     user receiving the grants
     * @param projectIds comma-separated project ids; empty revokes everything
     * @return result with SUCCESS, or USER_NO_OPERATION_PERM
     */
    public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) {
        Map<String, Object> result = new HashMap<>(5);
        result.put(Constants.STATUS, false);

        //only admin can operate
        if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) {
            return result;
        }

        //if the selected projectIds are empty, delete all items associated with the user
        projectUserMapper.deleteProjectRelation(0, userId);

        if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS, Constants.MSG)) {
            return result;
        }

        String[] projectIdArr = projectIds.split(",");

        for (String projectId : projectIdArr) {
            Date now = new Date();
            ProjectUser projectUser = new ProjectUser();
            projectUser.setUserId(userId);
            projectUser.setProjectId(Integer.parseInt(projectId));
            // perm 7 — kept from original hard-coded value (presumably full permission mask)
            projectUser.setPerm(7);
            projectUser.setCreateTime(now);
            projectUser.setUpdateTime(now);
            projectUserMapper.insert(projectUser);
        }

        putMsg(result, Status.SUCCESS);

        return result;
    }


    /**
     * grant resources to a user, admin only; replaces all existing grants
     *
     * @param loginUser   requesting user, must be admin
     * @param userId      user receiving the grants, must exist
     * @param resourceIds comma-separated resource ids; empty revokes everything
     * @return result with SUCCESS, or an error status
     */
    public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) {
        Map<String, Object> result = new HashMap<>(5);
        //only admin can operate
        if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) {
            return result;
        }
        User user = userMapper.selectById(userId);
        if (user == null) {
            putMsg(result, Status.USER_NOT_EXIST, userId);
            return result;
        }

        resourcesUserMapper.deleteResourceUser(userId, 0);

        if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS, Constants.MSG)) {
            return result;
        }

        String[] resourcesIdArr = resourceIds.split(",");

        for (String resourceId : resourcesIdArr) {
            Date now = new Date();
            ResourcesUser resourcesUser = new ResourcesUser();
            resourcesUser.setUserId(userId);
            resourcesUser.setResourcesId(Integer.parseInt(resourceId));
            resourcesUser.setPerm(7);
            resourcesUser.setCreateTime(now);
            resourcesUser.setUpdateTime(now);
            resourcesUserMapper.insert(resourcesUser);
        }

        putMsg(result, Status.SUCCESS);

        return result;
    }


    /**
     * grant udf functions to a user, admin only; replaces all existing grants
     *
     * @param loginUser requesting user, must be admin
     * @param userId    user receiving the grants
     * @param udfIds    comma-separated udf function ids; empty revokes everything
     * @return result with SUCCESS, or USER_NO_OPERATION_PERM
     */
    public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) {
        Map<String, Object> result = new HashMap<>(5);

        //only admin can operate
        if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) {
            return result;
        }

        udfUserMapper.deleteByUserId(userId);

        if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS, Constants.MSG)) {
            return result;
        }

        String[] udfIdArr = udfIds.split(",");

        for (String udfId : udfIdArr) {
            Date now = new Date();
            UDFUser udfUser = new UDFUser();
            udfUser.setUserId(userId);
            udfUser.setUdfId(Integer.parseInt(udfId));
            udfUser.setPerm(7);
            udfUser.setCreateTime(now);
            udfUser.setUpdateTime(now);
            udfUserMapper.insert(udfUser);
        }

        putMsg(result, Status.SUCCESS);

        return result;
    }


    /**
     * grant datasources to a user, admin only; replaces all existing grants
     *
     * @param loginUser     requesting user, must be admin
     * @param userId        user receiving the grants
     * @param datasourceIds comma-separated datasource ids; empty revokes everything
     * @return result with SUCCESS, or USER_NO_OPERATION_PERM
     */
    public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) {
        Map<String, Object> result = new HashMap<>(5);
        result.put(Constants.STATUS, false);

        //only admin can operate
        if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) {
            return result;
        }

        datasourceUserMapper.deleteByUserId(userId);

        if (check(result, StringUtils.isEmpty(datasourceIds), Status.SUCCESS, Constants.MSG)) {
            return result;
        }

        String[] datasourceIdArr = datasourceIds.split(",");

        for (String datasourceId : datasourceIdArr) {
            Date now = new Date();

            DatasourceUser datasourceUser = new DatasourceUser();
            datasourceUser.setUserId(userId);
            datasourceUser.setDatasourceId(Integer.parseInt(datasourceId));
            datasourceUser.setPerm(7);
            datasourceUser.setCreateTime(now);
            datasourceUser.setUpdateTime(now);
            datasourceUserMapper.insert(datasourceUser);
        }

        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * query user info; for general users the alert-group names are joined
     * into a comma-separated string on the returned user
     *
     * @param loginUser current user
     * @return result containing the user under DATA_LIST
     */
    public Map<String, Object> getUserInfo(User loginUser) {

        Map<String, Object> result = new HashMap<>();

        User user = null;
        if (loginUser.getUserType() == UserType.ADMIN_USER) {
            user = loginUser;
        } else {
            user = userMapper.queryDetailsById(loginUser.getId());

            List<AlertGroup> alertGroups = alertGroupMapper.queryByUserId(loginUser.getId());

            StringBuilder sb = new StringBuilder();

            if (alertGroups != null && alertGroups.size() > 0) {
                for (int i = 0; i < alertGroups.size() - 1; i++) {
                    sb.append(alertGroups.get(i).getGroupName()).append(",");
                }
                // BUG FIX: original appended the AlertGroup object itself (its
                // toString), not its name, for the last element
                sb.append(alertGroups.get(alertGroups.size() - 1).getGroupName());
                user.setAlertGroup(sb.toString());
            }
        }

        result.put(Constants.DATA_LIST, user);

        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * query all general users, admin only
     *
     * @param loginUser requesting user, must be admin
     * @return result containing the list under DATA_LIST
     */
    public Map<String, Object> queryAllGeneralUsers(User loginUser) {
        Map<String, Object> result = new HashMap<>(5);
        //only admin can operate
        if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) {
            return result;
        }

        List<User> userList = userMapper.queryAllGeneralUser();
        result.put(Constants.DATA_LIST, userList);
        putMsg(result, Status.SUCCESS);

        return result;
    }


    /**
     * query all users, admin only
     *
     * @param loginUser requesting user, must be admin
     * @return result containing the list under DATA_LIST
     */
    public Map<String, Object> queryUserList(User loginUser) {
        Map<String, Object> result = new HashMap<>(5);
        //only admin can operate
        if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) {
            return result;
        }

        List<User> userList = userMapper.selectList(null);
        result.put(Constants.DATA_LIST, userList);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * verify user name is not taken
     *
     * @param userName candidate user name
     * @return SUCCESS when free, USER_NAME_EXIST otherwise
     */
    public Result verifyUserName(String userName) {

        Result result = new Result();
        User user = userMapper.queryByUserNameAccurately(userName);
        if (user != null) {
            logger.error("user {} has exist, can't create again.", userName);

            putMsg(result, Status.USER_NAME_EXIST);
        } else {
            putMsg(result, Status.SUCCESS);
        }

        return result;
    }


    /**
     * list users NOT bound to the given alert group, admin only
     *
     * @param loginUser    requesting user, must be admin
     * @param alertgroupId alert group id
     * @return result containing the list under DATA_LIST
     */
    public Map<String, Object> unauthorizedUser(User loginUser, Integer alertgroupId) {

        Map<String, Object> result = new HashMap<>(5);
        //only admin can operate
        if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) {
            return result;
        }

        List<User> userList = userMapper.selectList(null);
        List<User> resultUsers = new ArrayList<>();
        Set<User> userSet = null;
        if (userList != null && userList.size() > 0) {
            userSet = new HashSet<>(userList);

            List<User> authedUserList = userMapper.queryUserListByAlertGroupId(alertgroupId);

            // subtract the already-authorized users from the full set
            Set<User> authedUserSet = null;
            if (authedUserList != null && authedUserList.size() > 0) {
                authedUserSet = new HashSet<>(authedUserList);
                userSet.removeAll(authedUserSet);
            }
            resultUsers = new ArrayList<>(userSet);
        }
        result.put(Constants.DATA_LIST, resultUsers);
        putMsg(result, Status.SUCCESS);

        return result;
    }


    /**
     * list users bound to the given alert group, admin only
     *
     * @param loginUser    requesting user, must be admin
     * @param alertgroupId alert group id
     * @return result containing the list under DATA_LIST
     */
    public Map<String, Object> authorizedUser(User loginUser, Integer alertgroupId) {
        Map<String, Object> result = new HashMap<>(5);
        //only admin can operate
        if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) {
            return result;
        }
        List<User> userList = userMapper.queryUserListByAlertGroupId(alertgroupId);
        result.put(Constants.DATA_LIST, userList);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * when bool is true, record the given status into result and report failure
     *
     * @param result result map to populate on failure
     * @param bool   failure condition
     * @param status status to record when the condition holds
     * @param key    result key under which the status message is stored
     * @return true when the condition held (caller should return early)
     */
    private boolean check(Map<String, Object> result, boolean bool, Status status, String key) {
        if (bool) {
            result.put(Constants.STATUS, status);
            result.put(key, status.getMsg());
            return true;
        }
        return false;
    }

    /**
     * check whether the tenant is missing
     *
     * @param tenantId tenant id
     * @return true when no tenant with this id exists
     */
    private boolean checkTenant(int tenantId) {
        return tenantMapper.queryById(tenantId) == null;
    }
}
package org.apache.dolphinscheduler.api.service;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Constants;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * worker group service
 */
@Service
public class WorkerGroupService extends BaseService {


    @Autowired
    WorkerGroupMapper workerGroupMapper;

    /**
     * create a worker group (id == 0) or update an existing one
     *
     * @param id     worker group id; 0 means create
     * @param name   group name, required and unique
     * @param ipList comma-separated worker ip list
     * @return result with SUCCESS, or NAME_NULL / NAME_EXIST
     */
    public Map<String, Object> saveWorkerGroup(int id, String name, String ipList) {

        Map<String, Object> result = new HashMap<>(5);

        if (StringUtils.isEmpty(name)) {
            putMsg(result, Status.NAME_NULL);
            return result;
        }

        Date now = new Date();
        WorkerGroup group;
        if (id == 0) {
            group = new WorkerGroup();
            group.setCreateTime(now);
        } else {
            group = workerGroupMapper.selectById(id);
        }
        group.setName(name);
        group.setIpList(ipList);
        group.setUpdateTime(now);

        // reject a name already used by a different group
        if (checkWorkerGroupNameExists(group)) {
            putMsg(result, Status.NAME_EXIST, group.getName());
            return result;
        }

        if (group.getId() == 0) {
            workerGroupMapper.insert(group);
        } else {
            workerGroupMapper.updateById(group);
        }
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * check whether the group's name clashes with a different existing group
     *
     * @param workerGroup candidate group (id 0 while creating)
     * @return true when the name is already taken by another group
     */
    private boolean checkWorkerGroupNameExists(WorkerGroup workerGroup) {

        List<WorkerGroup> sameName = workerGroupMapper.queryWorkerGroupByName(workerGroup.getName());

        if (sameName.isEmpty()) {
            return false;
        }
        // creating: any existing row with this name is a clash
        if (workerGroup.getId() == 0) {
            return true;
        }
        // updating: a clash only when the name belongs to a different row
        for (WorkerGroup existing : sameName) {
            if (existing.getId() != workerGroup.getId()) {
                return true;
            }
        }
        return false;
    }

    /**
     * query worker groups paging
     *
     * @param pageNo    page number (1-based)
     * @param pageSize  page size
     * @param searchVal optional search keyword
     * @return result containing a PageInfo under DATA_LIST
     */
    public Map<String, Object> queryAllGroupPaging(Integer pageNo, Integer pageSize, String searchVal) {

        Map<String, Object> result = new HashMap<>(5);

        Page<WorkerGroup> page = new Page<>(pageNo, pageSize);
        IPage<WorkerGroup> groupPage = workerGroupMapper.queryListPaging(page, searchVal);

        PageInfo<WorkerGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotalCount((int) groupPage.getTotal());
        pageInfo.setLists(groupPage.getRecords());

        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * delete worker group by id
     *
     * @param id worker group id
     * @return result with SUCCESS
     */
    public Map<String, Object> deleteWorkerGroupById(Integer id) {

        Map<String, Object> result = new HashMap<>(5);

        workerGroupMapper.deleteById(id);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * query all worker groups
     *
     * @return result containing the full list under DATA_LIST
     */
    public Map<String, Object> queryAllGroup() {
        Map<String, Object> result = new HashMap<>(5);
        List<WorkerGroup> groups = workerGroupMapper.queryAllWorkerGroup();
        result.put(Constants.DATA_LIST, groups);
        putMsg(result, Status.SUCCESS);
        return result;
    }
}
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.utils; + + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; +import org.apache.commons.lang.StringUtils; + +import java.text.MessageFormat; +import java.util.HashMap; +import java.util.Map; +import java.util.regex.Pattern; + + +/** + * check utils + */ +public class CheckUtils { + + + /** + * check username + * + * @param userName + */ + public static boolean checkUserName(String userName) { + return regexChecks(userName, org.apache.dolphinscheduler.common.Constants.REGEX_USER_NAME); + } + + /** + * check email + * + * @param email + */ + public static boolean checkEmail(String email) { + return email.length() > 5 && email.length() <= 40 && regexChecks(email, org.apache.dolphinscheduler.common.Constants.REGEX_MAIL_NAME) ; + } + + /** + * check project description + * + * @param desc + */ + public static Map checkDesc(String desc) { + Map result = new HashMap<>(); + if (StringUtils.isNotEmpty(desc) && desc.length() > 200) { + result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); + result.put(Constants.MSG, 
MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "desc length")); + }else{ + result.put(Constants.STATUS, Status.SUCCESS); + } + return result; + } + + /** + * check extra info + * + * @param otherParams + */ + public static boolean checkOtherParams(String otherParams) { + return StringUtils.isNotEmpty(otherParams) && !JSONUtils.checkJsonVaild(otherParams); + } + + /** + * check password + * + * @param password + */ + public static boolean checkPassword(String password) { + return StringUtils.isNotEmpty(password) && password.length() >= 2 && password.length() <= 20; + } + + /** + * check phone + * + * @param phone + */ + public static boolean checkPhone(String phone) { + return StringUtils.isNotEmpty(phone) && phone.length() > 18; + } + + + /** + * check task node parameter + * + * @param parameter + * @param taskType + * @return + */ + public static boolean checkTaskNodeParameters(String parameter, String taskType) { + AbstractParameters abstractParameters = TaskParametersUtils.getParameters(taskType, parameter); + + if (abstractParameters != null) { + return abstractParameters.checkParameters(); + } + + return false; + } + + /** + * check params + * @param userName + * @param password + * @param email + * @param phone + * @return + */ + public static Map checkUserParams(String userName, String password, String email, String phone){ + Map result = new HashMap<>(); + try{ + CheckUtils.checkUserName(userName); + CheckUtils.checkEmail(email); + CheckUtils.checkPassword(password); + CheckUtils.checkPhone(phone); + result.put(Constants.STATUS, Status.SUCCESS); + result.put(Constants.MSG, Status.SUCCESS.getMsg()); + }catch (Exception e){ + result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); + result.put(Constants.MSG, e.getMessage()); + } + return result; + } + + + + + + /** + * 正则匹配 + * + * @param str + * @param pattern + * @return + */ + private static boolean regexChecks(String str, Pattern pattern) { + if 
(org.apache.commons.lang3.StringUtils.isEmpty(str)) { + return false; + } + + return pattern.matcher(str).matches(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Constants.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Constants.java new file mode 100644 index 0000000000..70d6ceaab1 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Constants.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Web application constants: response map keys, paging keys, JDBC driver and
 * URL prefixes, and session settings used across the API layer.
 */
public class Constants {

    private Constants() {
        // utility class: no instances
    }

    /**
     * status key in result maps
     */
    public static final String STATUS = "status";

    /**
     * message key in result maps
     */
    public static final String MSG = "msg";

    /**
     * total number of records
     */
    public static final String COUNT = "count";

    /**
     * number of records per page
     */
    public static final String PAGE_SIZE = "pageSize";

    /**
     * current page number
     */
    public static final String PAGE_NUMBER = "pageNo";

    /**
     * result key
     */
    public static final String RESULT = "result";

    /**
     * payload list key
     */
    public static final String DATA_LIST = "data";

    public static final String TOTAL_LIST = "totalList";

    public static final String CURRENT_PAGE = "currentPage";

    public static final String TOTAL_PAGE = "totalPage";

    public static final String TOTAL = "total";

    /**
     * session user attribute name
     */
    public static final String SESSION_USER = "session.user";

    public static final String SESSION_ID = "sessionId";

    /** placeholder shown instead of real passwords */
    public static final String PASSWORD_DEFAULT = "******";

    /**
     * JDBC driver class names
     */
    public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver";
    public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver";
    public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";
    public static final String COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver";
    public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver";
    public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver";

    /**
     * database type names
     */
    public static final String MYSQL = "MYSQL";
    public static final String POSTGRESQL = "POSTGRESQL";
    public static final String HIVE = "HIVE";
    public static final String SPARK = "SPARK";
    public static final String CLICKHOUSE = "CLICKHOUSE";
    public static final String ORACLE = "ORACLE";
    public static final String SQLSERVER = "SQLSERVER";

    /**
     * JDBC URL prefixes
     */
    public static final String JDBC_MYSQL = "jdbc:mysql://";
    public static final String JDBC_POSTGRESQL = "jdbc:postgresql://";
    public static final String JDBC_HIVE_2 = "jdbc:hive2://";
    public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://";
    public static final String JDBC_ORACLE = "jdbc:oracle:thin:@//";
    public static final String JDBC_SQLSERVER = "jdbc:sqlserver://";


    public static final String ADDRESS = "address";
    public static final String DATABASE = "database";
    public static final String JDBC_URL = "jdbcUrl";
    public static final String PRINCIPAL = "principal";
    public static final String USER = "user";
    public static final String PASSWORD = "password";
    public static final String OTHER = "other";


    /**
     * session timeout in seconds
     */
    public static final int SESSION_TIME_OUT = 7200;
    // NOTE(review): lower-camel name kept for backward compatibility with
    // existing callers; value is 1 GiB
    public static final int maxFileSize = 1024 * 1024 * 1024;
    public static final String UDF = "UDF";
    public static final String CLASS = "class";
    public static final String RECEIVERS = "receivers";
    public static final String RECEIVERS_CC = "receiversCc";
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.core.io.Resource; +import org.springframework.core.io.UrlResource; +import org.springframework.web.multipart.MultipartFile; + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +/** + * file utils + */ +public class FileUtils { + private static final Logger logger = LoggerFactory.getLogger(FileUtils.class); + + /** + * copy source file to target file + * + * @param file + * @param destFilename + */ + + public static void copyFile(MultipartFile file, String destFilename) { + try { + + File destFile = new File(destFilename); + File destParentDir = new File(destFile.getParent()); + + if (!destParentDir.exists()) { + org.apache.commons.io.FileUtils.forceMkdir(destParentDir); + } + + Files.copy(file.getInputStream(), Paths.get(destFilename)); + } catch (IOException e) { + logger.error(String.format("failed to copy file , {} is empty file", file.getOriginalFilename()), e); + } + } + + /** + * file to resource + * + * @param filename + * @return + */ + public static Resource file2Resource(String filename) throws MalformedURLException { + Path file = Paths.get(filename); + + Resource resource = new UrlResource(file.toUri()); + if (resource.exists() || resource.isReadable()) { + return resource; + } else { + logger.error("file can not read : {}", filename); + + } + return null; + } +} diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java new file mode 100644 index 0000000000..cd2d66dfd0 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.utils; + +import java.util.List; + +/** + * page info + * + * @param + */ +public class PageInfo { + + /** + * list + */ + private List lists; + /** + * total count + */ + private Integer totalCount = 0; + /** + * page size + */ + private Integer pageSize = 20; + /** + * current page + */ + private Integer currentPage = 0; + /** + * pageNo + */ + private Integer pageNo; + + public PageInfo(Integer currentPage,Integer pageSize){ + if(currentPage==null){ + currentPage=1; + } + this.pageNo=(currentPage-1)*pageSize; + this.pageSize=pageSize; + this.currentPage=currentPage; + } + + public Integer getStart() { + return pageNo; + } + + public void setStart(Integer start) { + this.pageNo = start; + } + + public Integer getTotalPage() { + if (pageSize==null||pageSize == 0) { + pageSize = 7; + } + if (this.totalCount % this.pageSize == 0) { + return (this.totalCount / this.pageSize)==0?1:(this.totalCount / this.pageSize); + } + return (this.totalCount / this.pageSize + 1); + } + + public List getLists() { + return lists; + } + + public void setLists(List lists) { + this.lists = lists; + } + + public Integer getTotalCount() { + if (totalCount==null) { + totalCount = 0; + } + return totalCount; + } + + public void setTotalCount(Integer totalCount) { + this.totalCount = totalCount; + } + + public Integer getPageSize() { + if (pageSize==null||pageSize == 0) { + pageSize = 7; + } + return pageSize; + } + + public void setPageSize(Integer pageSize) { + this.pageSize = pageSize; + } + + public void setCurrentPage(Integer currentPage) { + this.currentPage = currentPage; + } + + public Integer getCurrentPage() { + if (currentPage==null||currentPage <= 0) { + this.currentPage = 1; + } + return this.currentPage; + } + +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java new file 
mode 100644 index 0000000000..9fc26c5246 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.utils; + +/** + * result + * + * @param + */ +public class Result { + /** + * status + * 状态码 + */ + private Integer code; + + /** + * message + * 消息 + */ + private String msg; + + /** + * data + */ + private T data; + + public Result(){} + + public Result(Integer code , String msg){ + this.code = code; + this.msg = msg; + } + + public Integer getCode() { + return code; + } + + public void setCode(Integer code) { + this.code = code; + } + + public String getMsg() { + return msg; + } + + public void setMsg(String msg) { + this.msg = msg; + } + + public T getData() { + return data; + } + + public void setData(T data) { + this.data = data; + } + + + @Override + public String toString() { + return "Status{" + + "code='" + code + '\'' + + ", msg='" + msg + '\'' + + ", data=" + data + + '}'; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZooKeeperState.java 
/**
 * ZooKeeper status monitor backed by the four-letter-word admin commands
 * ("srvr", "wchs", "cons", "ruok").
 *
 * NOTE(review): this file carries no ASF license header, unlike its siblings.
 */
public class ZooKeeperState {

    private static final Logger logger = LoggerFactory.getLogger(ZooKeeperState.class);

    // target server address
    private final String host;
    private final int port;

    // metrics parsed from the four-letter-word replies; -1 / null means
    // "not collected yet" or "not reported by the server"
    private int minLatency = -1, avgLatency = -1, maxLatency = -1;
    private long received = -1;
    private long sent = -1;
    private int outStanding = -1;
    private long zxid = -1;
    private String mode = null;
    private int nodeCount = -1;
    private int watches = -1;
    private int connections = -1;

    /**
     * @param connectionString "host:port"; assumed to contain a ':' --
     *                         no ':' raises StringIndexOutOfBoundsException
     */
    public ZooKeeperState(String connectionString) {
        String host = connectionString.substring(0,
                connectionString.indexOf(':'));
        int port = Integer.parseInt(connectionString.substring(connectionString
                .indexOf(':') + 1));
        this.host = host;
        this.port = port;
    }

    /**
     * Populate the metric fields by issuing "srvr", "wchs" and "cons"
     * commands and parsing the replies line by line.
     */
    public void getZookeeperInfo() {
        String content = cmd("srvr");
        if (StringUtils.isNotBlank(content)) {
            Scanner scannerForStat = new Scanner(content);
            while (scannerForStat.hasNext()) {
                String line = scannerForStat.nextLine();
                if (line.startsWith("Latency min/avg/max:")) {
                    // e.g. "Latency min/avg/max: 0/1/12"
                    // NOTE(review): newer ZooKeeper versions report avg as a
                    // float ("0.0"), which Integer.parseInt would reject --
                    // confirm the deployed server version
                    String[] latencys = getStringValueFromLine(line).split("/");
                    minLatency = Integer.parseInt(latencys[0]);
                    avgLatency = Integer.parseInt(latencys[1]);
                    maxLatency = Integer.parseInt(latencys[2]);
                } else if (line.startsWith("Received:")) {
                    received = Long.parseLong(getStringValueFromLine(line));
                } else if (line.startsWith("Sent:")) {
                    sent = Long.parseLong(getStringValueFromLine(line));
                } else if (line.startsWith("Outstanding:")) {
                    outStanding = Integer.parseInt(getStringValueFromLine(line));
                } else if (line.startsWith("Zxid:")) {
                    // reply looks like "Zxid: 0x1234"; strip the "0x" prefix
                    // and parse the remainder as hex
                    zxid = Long.parseLong(getStringValueFromLine(line).substring(2), 16);
                } else if (line.startsWith("Mode:")) {
                    mode = getStringValueFromLine(line);
                } else if (line.startsWith("Node count:")) {
                    nodeCount = Integer.parseInt(getStringValueFromLine(line));
                }
            }
            scannerForStat.close();
        }

        // watch count from the "wchs" command
        String wchsText = cmd("wchs");
        if (StringUtils.isNotBlank(wchsText)) {
            Scanner scannerForWchs = new Scanner(wchsText);
            while (scannerForWchs.hasNext()) {
                String line = scannerForWchs.nextLine();
                if (line.startsWith("Total watches:")) {
                    watches = Integer.parseInt(getStringValueFromLine(line));
                }
            }
            scannerForWchs.close();
        }

        // connection count from the "cons" command: each reply line is
        // counted as one connection
        // NOTE(review): this counts every line, including any trailing blank
        // line -- the count may be off by one; verify against the raw reply
        String consText = cmd("cons");
        if (StringUtils.isNotBlank(consText)) {
            Scanner scannerForCons = new Scanner(consText);
            if (StringUtils.isNotBlank(consText)) {
                connections = 0;
            }
            while (scannerForCons.hasNext()) {
                @SuppressWarnings("unused")
                String line = scannerForCons.nextLine();
                ++connections;
            }
            scannerForCons.close();
        }
    }


    /**
     * Liveness probe: true when the server answers "ruok" with "imok".
     */
    public boolean ruok() {
        return "imok\n".equals(cmd("ruok"));
    }


    /**
     * Extract the value after the first ':' of a reply line, with all spaces
     * removed.
     */
    private String getStringValueFromLine(String line) {
        return line.substring(line.indexOf(":") + 1, line.length()).replaceAll(
                " ", "").trim();
    }

    /**
     * Worker thread that sends one four-letter-word command so the caller can
     * bound the wait with join() (see cmd()).
     */
    private class SendThread extends Thread {
        private String cmd;

        // reply text; empty on failure
        public String ret = "";

        public SendThread(String cmd) {
            this.cmd = cmd;
        }

        @Override
        public void run() {
            try {
                ret = FourLetterWordMain.send4LetterWord(host, port, cmd);
            } catch (IOException e) {
                logger.error(e.getMessage(), e);
                return;
            }
        }

    }

    /**
     * Send a four-letter-word command and wait up to 5 seconds for the reply.
     * Returns "" on timeout or interruption.
     *
     * NOTE(review): the interrupt flag is not restored after
     * InterruptedException, and a timed-out SendThread is left running.
     */
    private String cmd(String cmd) {
        final int waitTimeout = 5;
        SendThread sendThread = new SendThread(cmd);
        sendThread.setName("FourLetterCmd:" + cmd);
        sendThread.start();
        try {
            sendThread.join(waitTimeout * 1000);
            return sendThread.ret;
        } catch (InterruptedException e) {
            logger.error("send " + cmd + " to server " + host + ":" + port + " failed!", e);
        }
        return "";
    }

    public Logger getLogger() {
        return logger;
    }

    public String getHost() {
        return host;
    }

    public int getPort() {
        return port;
    }

    public int getMinLatency() {
        return minLatency;
    }

    public int getAvgLatency() {
        return avgLatency;
    }

    public int getMaxLatency() {
        return maxLatency;
    }

    public long getReceived() {
        return received;
    }

    public long getSent() {
        return sent;
    }

    public int getOutStanding() {
        return outStanding;
    }

    public long getZxid() {
        return zxid;
    }

    public String getMode() {
        return mode;
    }

    public int getNodeCount() {
        return nodeCount;
    }

    public int getWatches() {
        return watches;
    }

    public int getConnections() {
        return connections;
    }

    @Override
    public String toString() {
        return "ZooKeeperState [host=" + host + ", port=" + port
                + ", minLatency=" + minLatency + ", avgLatency=" + avgLatency
                + ", maxLatency=" + maxLatency + ", received=" + received
                + ", sent=" + sent + ", outStanding=" + outStanding + ", zxid="
                + zxid + ", mode=" + mode + ", nodeCount=" + nodeCount
                + ", watches=" + watches + ", connections="
                + connections + "]";
    }



}
org.apache.dolphinscheduler.dao.entity.ZookeeperRecord; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + + +/** + * monitor zookeeper info + */ +public class ZookeeperMonitor extends AbstractZKClient{ + + private static final Logger LOG = LoggerFactory.getLogger(ZookeeperMonitor.class); + private static final String zookeeperList = AbstractZKClient.getZookeeperQuorum(); + + /** + * + * @return zookeeper info list + */ + public static List zookeeperInfoList(){ + String zookeeperServers = zookeeperList.replaceAll("[\\t\\n\\x0B\\f\\r]", ""); + try{ + return zookeeperInfoList(zookeeperServers); + }catch(Exception e){ + LOG.error(e.getMessage(),e); + } + return null; + } + + /** + * get master servers + * @return + */ + public List getMasterServers(){ + return getServersList(ZKNodeType.MASTER); + } + + /** + * master construct is the same with worker, use the master instead + * @return + */ + public List getWorkerServers(){ + return getServersList(ZKNodeType.WORKER); + } + + private static List zookeeperInfoList(String zookeeperServers) { + + List list = new ArrayList<>(5); + + if(StringUtils.isNotBlank(zookeeperServers)){ + String[] zookeeperServersArray = zookeeperServers.split(","); + + for (String zookeeperServer : zookeeperServersArray) { + ZooKeeperState state = new ZooKeeperState(zookeeperServer); + boolean ok = state.ruok(); + if(ok){ + state.getZookeeperInfo(); + } + + String hostName = zookeeperServer; + int connections = state.getConnections(); + int watches = state.getWatches(); + long sent = state.getSent(); + long received = state.getReceived(); + String mode = state.getMode(); + int minLatency = state.getMinLatency(); + int avgLatency = state.getAvgLatency(); + int maxLatency = state.getMaxLatency(); + int nodeCount = state.getNodeCount(); + int status = ok ? 
1 : 0; + Date date = new Date(); + + ZookeeperRecord zookeeperRecord = new ZookeeperRecord(hostName,connections,watches,sent,received,mode,minLatency,avgLatency,maxLatency,nodeCount,status,date); + list.add(zookeeperRecord); + + } + } + + return list; + } +} diff --git a/dolphinscheduler-api/src/main/resources/apiserver_logback.xml b/dolphinscheduler-api/src/main/resources/apiserver_logback.xml new file mode 100644 index 0000000000..0d27889a65 --- /dev/null +++ b/dolphinscheduler-api/src/main/resources/apiserver_logback.xml @@ -0,0 +1,42 @@ + + + + + + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + + INFO + + ${log.base}/dolphinscheduler-api-server.log + + ${log.base}/dolphinscheduler-api-server.%d{yyyy-MM-dd_HH}.%i.log + 168 + 64MB + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/resources/application.properties b/dolphinscheduler-api/src/main/resources/application.properties new file mode 100644 index 0000000000..f956326cc5 --- /dev/null +++ b/dolphinscheduler-api/src/main/resources/application.properties @@ -0,0 +1,19 @@ +# server port +server.port=12345 + +# session config +server.servlet.session.timeout=7200 + +server.servlet.context-path=/dolphinscheduler/ + +# file size limit for upload +spring.servlet.multipart.max-file-size=1024MB +spring.servlet.multipart.max-request-size=1024MB + +#post content +server.jetty.max-http-post-size=5000000 + +spring.messages.encoding=UTF-8 + +#i18n classpath folder , file prefix messages, if have many files, use "," seperator +spring.messages.basename=i18n/messages diff --git a/dolphinscheduler-api/src/main/resources/combined_logback.xml b/dolphinscheduler-api/src/main/resources/combined_logback.xml new file mode 100644 index 0000000000..0a5bd7ee24 --- /dev/null +++ b/dolphinscheduler-api/src/main/resources/combined_logback.xml @@ -0,0 +1,62 @@ + 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api;

import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.URI;
import java.util.ArrayList;
import java.util.List;

/**
 * Ad-hoc HTTP smoke tests against a locally running API server
 * (http://localhost:12345). These tests require a live server and a valid
 * session token, so they are not meant for automated CI runs.
 */
public class HttpClientTest {

    private static final Logger logger = LoggerFactory.getLogger(HttpClientTest.class);

    /**
     * POST form parameters to the create-project endpoint and log the body
     * on a 200 response.
     */
    @Test
    public void doPOSTParam() throws Exception {
        // form parameters for the create-project endpoint
        List<NameValuePair> parameters = new ArrayList<>();
        parameters.add(new BasicNameValuePair("projectName", "qzw"));
        parameters.add(new BasicNameValuePair("desc", "qzw"));

        HttpPost httpPost = new HttpPost("http://localhost:12345/escheduler/projects/create");
        httpPost.setHeader("token", "123");
        httpPost.setEntity(new UrlEncodedFormEntity(parameters));

        // try-with-resources guarantees client and response are closed even on failure
        try (CloseableHttpClient httpclient = HttpClients.createDefault();
             CloseableHttpResponse response = httpclient.execute(httpPost)) {
            // response status code 200
            if (response.getStatusLine().getStatusCode() == 200) {
                String content = EntityUtils.toString(response.getEntity(), "UTF-8");
                logger.info(content);
            }
        }
    }

    /**
     * GET with a URL-encoded Chinese path variable (project name) and log the body.
     * @throws Exception on request failure
     */
    @Test
    public void doGETParamPathVariableAndChinese() throws Exception {
        // path variable is the percent-encoded Chinese project name
        URI uri = new URIBuilder("http://localhost:12345/escheduler/projects/%E5%85%A8%E9%83%A8%E6%B5%81%E7%A8%8B%E6%B5%8B%E8%AF%95/process/list")
                .build();

        HttpGet httpGet = new HttpGet(uri);
        httpGet.setHeader("token", "10f5625a2a1cbf9aa710653796c5d764");

        try (CloseableHttpClient httpclient = HttpClients.createDefault();
             CloseableHttpResponse response = httpclient.execute(httpGet)) {
            // response status code 200
            if (response.getStatusLine().getStatusCode() == 200) {
                String content = EntityUtils.toString(response.getEntity(), "UTF-8");
                logger.info("start--------------->");
                logger.info(content);
                logger.info("end----------------->");
            }
        }
    }

    /**
     * GET with query-string parameters built via {@link URIBuilder}.
     * @throws Exception on request failure
     */
    @Test
    public void doGETParam() throws Exception {
        List<NameValuePair> parameters = new ArrayList<>();
        parameters.add(new BasicNameValuePair("startDate", "2018-04-22 19:30:08"));
        parameters.add(new BasicNameValuePair("endDate", "2028-04-22 19:30:08"));
        parameters.add(new BasicNameValuePair("projectId", "0"));

        // define the parameters of the request
        URI uri = new URIBuilder("http://localhost:12345/escheduler/projects/analysis/queue-count")
                .setParameters(parameters)
                .build();

        HttpGet httpGet = new HttpGet(uri);
        httpGet.setHeader("token", "2aef24c052c212fab9eec78848c2258b");

        try (CloseableHttpClient httpclient = HttpClients.createDefault();
             CloseableHttpResponse response = httpclient.execute(httpGet)) {
            // response status code 200
            if (response.getStatusLine().getStatusCode() == 200) {
                String content = EntityUtils.toString(response.getEntity(), "UTF-8");
                logger.info("start--------------->");
                logger.info(content);
                logger.info("end----------------->");
            }
        }
    }

}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.service.SessionService; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.commons.lang3.StringUtils; +import org.junit.*; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.test.web.servlet.setup.MockMvcBuilders; +import org.springframework.web.context.WebApplicationContext; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; + +@Ignore +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class AbstractControllerTest { + private static Logger logger = LoggerFactory.getLogger(AbstractControllerTest.class); + public static final String SESSION_ID = "sessionId"; + + protected MockMvc mockMvc; + + @Autowired + private WebApplicationContext webApplicationContext; + + @Autowired + private SessionService sessionService; + + protected User user; + protected String sessionId; + + @Before + public void setUp() { + mockMvc = 
MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); + createSession(); + } + + + @After + public void after(){ + sessionService.signOut("127.0.0.1", user); + } + + + private void createSession(){ + + User loginUser = new User(); + loginUser.setId(1); + loginUser.setUserType(UserType.GENERAL_USER); + + user = loginUser; + + String session = sessionService.createSession(loginUser, "127.0.0.1"); + sessionId = session; + + Assert.assertTrue(StringUtils.isNotEmpty(session)); + + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java new file mode 100644 index 0000000000..c15fe3c1c4 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.http.MediaType; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.test.web.servlet.setup.MockMvcBuilders; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; +import org.springframework.web.context.WebApplicationContext; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +@Ignore +@RunWith(SpringRunner.class) +@SpringBootTest +public class DataAnalysisControllerTest { + private static Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class); + + private MockMvc mockMvc; + + @Autowired + private WebApplicationContext webApplicationContext; + + @Before + public void setUp() { + mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); + } + + @Test + public void countTaskState() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("startDate","2019-02-01 00:00:00"); + paramsMap.add("endDate","2019-02-28 00:00:00"); + paramsMap.add("projectId","21"); + + MvcResult mvcResult = 
mockMvc.perform(get("/projects/analysis/task-state-count") + .header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82") + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + public void countProcessInstanceState() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("startDate","2019-02-01 00:00:00"); + paramsMap.add("endDate","2019-02-28 00:00:00"); + paramsMap.add("projectId","21"); + + MvcResult mvcResult = mockMvc.perform(get("/projects/analysis/process-state-count") + .header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82") + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java new file mode 100644 index 0000000000..450a259e56 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + + +/** + * data source controller test + */ +public class DataSourceControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(DataSourceControllerTest.class); + + + @Test + public void queryDataSource() throws Exception { + MvcResult mvcResult = mockMvc.perform(get("/datasources/list").header("sessionId", sessionId).param("type","HIVE")) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + 
.andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + @Ignore + @Test + public void connectDataSource() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("name","hive data source"); + paramsMap.add("type","HIVE"); + paramsMap.add("host","192.168.xx.xx"); + paramsMap.add("port","10000"); + paramsMap.add("database","default"); + paramsMap.add("userName","hive"); + paramsMap.add("password",""); + paramsMap.add("other",""); + MvcResult mvcResult = mockMvc.perform(post("/datasources/connect") + .header("sessionId", sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java new file mode 100644 index 0000000000..ebbf1070bd --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * executor controller test + */ +@Ignore +public class ExecutorControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(ExecutorControllerTest.class); + + + @Test + public void startCheckProcessDefinition() throws Exception { + + MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/executors/start-check","project_test1") + .header(SESSION_ID, sessionId) + .param("processDefinitionId","226")) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + Result result = 
JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + public void getReceiverCc() throws Exception { + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + //paramsMap.add("processDefinitionId","4"); + paramsMap.add("processInstanceId","13"); + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/executors/get-receiver-cc","li_sql_test") + .header(SESSION_ID, sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoggerControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoggerControllerTest.java new file mode 100644 index 0000000000..d9aaeb3e8b --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoggerControllerTest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; + +/** + * logger controller test + */ +public class LoggerControllerTest extends AbstractControllerTest { + + private static Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class); + + @Test + public void queryLog() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("taskInstId","-1"); + paramsMap.add("skipLineNum","0"); + paramsMap.add("limit","1000"); + + MvcResult mvcResult = mockMvc.perform(get("/log/detail") + .header("sessionId", sessionId) + .params(paramsMap)) +// .andExpect(status().isOk()) +// .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java new file mode 100644 index 0000000000..4b808e00e6 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * login controller test + */ +public class LoginControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); + + + + @Test + public void login() throws Exception { + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("userName","admin"); + paramsMap.add("userPassword","escheduler123"); + + MvcResult mvcResult = mockMvc.perform(post("/login") + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java new file mode 
100644 index 0000000000..941c4ce638 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java @@ -0,0 +1,82 @@ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import com.alibaba.fastjson.JSONObject; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * monitor controller test + */ +public class MonitorControllerTest extends AbstractControllerTest { + + private static final Logger logger = LoggerFactory.getLogger(MonitorControllerTest.class); + + + @Test + public void listMaster() throws Exception { + + MvcResult mvcResult = mockMvc.perform(get("/monitor/master/list") + .header(SESSION_ID, sessionId) + /* .param("type", ResourceType.FILE.name())*/ ) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + result.getCode().equals(Status.SUCCESS.getCode()); + + + JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + + @Test + public void queryDatabaseState() throws Exception { + MvcResult mvcResult = mockMvc.perform(get("/monitor/database") + 
.header(SESSION_ID, sessionId) + /* .param("type", ResourceType.FILE.name())*/ ) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + result.getCode().equals(Status.SUCCESS.getCode()); + + + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + + @Test + public void queryZookeeperState() throws Exception { + MvcResult mvcResult = mockMvc.perform(get("/monitor/zookeeper/list") + .header(SESSION_ID, sessionId) + /* .param("type", ResourceType.FILE.name())*/ ) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + result.getCode().equals(Status.SUCCESS.getCode()); + + + + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java new file mode 100644 index 0000000000..e6b0f6a2a1 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * process definition controller test + */ +public class ProcessDefinitionControllerTest extends AbstractControllerTest{ + + private static Logger logger = LoggerFactory.getLogger(ProcessDefinitionControllerTest.class); + + @Test + public void createProcessDefinition() throws Exception { + String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho 
${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; + String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"; + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("name","dag_test"); + paramsMap.add("processDefinitionJson",json); + paramsMap.add("locations", locations); + paramsMap.add("connects", "[]"); + paramsMap.add("desc", "desc test"); + + MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/process/save","project_test1") + .header(SESSION_ID, sessionId) + .params(paramsMap)) + .andExpect(status().isCreated()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java new file mode 100644 index 0000000000..0ad0222953 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * process instance controller test + */ +public class ProcessInstanceControllerTest extends AbstractControllerTest { + private static Logger logger = LoggerFactory.getLogger(ProcessInstanceControllerTest.class); + + + @Test + public void queryTaskListByProcessId() throws Exception { + + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/task-list-by-process-id","project_test1") + .header(SESSION_ID, sessionId) + .param("processInstanceId","-1")) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + 
Assert.assertEquals(Status.PROJECT_NOT_FOUNT.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java new file mode 100644 index 0000000000..a6c3138f0c --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * project controller + */ +public class ProjectControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(ProjectControllerTest.class); + + + @Test + public void createProject() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("projectName","project_test1"); + paramsMap.add("desc","the test project"); + + MvcResult mvcResult = mockMvc.perform(post("/projects/create") + .header(SESSION_ID, sessionId) + .params(paramsMap)) + .andExpect(status().isCreated()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/QueueControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/QueueControllerTest.java new file mode 100644 index
0000000000..7a24b24efc --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/QueueControllerTest.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * queue controller test + */ +public class QueueControllerTest extends AbstractControllerTest{ + + private static Logger 
logger = LoggerFactory.getLogger(QueueControllerTest.class); + + @Test + public void queryList() throws Exception { + + MvcResult mvcResult = mockMvc.perform(get("/queue/list") + .header(SESSION_ID, sessionId)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + public void queryPagingList() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + //paramsMap.add("processInstanceId","1380"); + paramsMap.add("searchVal",""); + paramsMap.add("pageNo","1"); + paramsMap.add("pageSize","20"); + + MvcResult mvcResult = mockMvc.perform(get("/queue/list-paging") + .header(SESSION_ID, sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + public void createQueue() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("queue","ait"); + paramsMap.add("queueName","ait"); + + MvcResult mvcResult = mockMvc.perform(post("/queue/create") + .header(SESSION_ID, sessionId) + .params(paramsMap)) + .andExpect(status().isCreated()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); +// Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + + + } + + @Test 
+ public void updateQueue() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("id","2"); + paramsMap.add("queue","ait12"); + paramsMap.add("queueName","aitName"); + + MvcResult mvcResult = mockMvc.perform(post("/queue/update") + .header(SESSION_ID, sessionId) + .params(paramsMap)) + .andExpect(status().isCreated()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + //Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + public void verifyQueue() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("queue","ait123"); + paramsMap.add("queueName","aitName"); + + MvcResult mvcResult = mockMvc.perform(post("/queue/verify-queue") + .header(SESSION_ID, sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + //Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java new file mode 100644 index 0000000000..ef8fc723bc --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import com.alibaba.fastjson.JSONObject; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * resources controller test + */ +public class ResourcesControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(ResourcesControllerTest.class); + + @Test + public void queryResourceList() throws Exception { + + MvcResult mvcResult = mockMvc.perform(get("/resources/list") + .header(SESSION_ID, sessionId) + .param("type",
ResourceType.FILE.name())) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + result.getCode().equals(Status.SUCCESS.getCode()); + JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + public void verifyResourceName() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("name","list_resources_1.sh"); + paramsMap.add("type","FILE"); + + MvcResult mvcResult = mockMvc.perform(get("/resources/verify-name") + .header(SESSION_ID, sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + + Assert.assertEquals(Status.TENANT_NOT_EXIST.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java new file mode 100644 index 0000000000..0428ced85d --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * scheduler controller test + */ +public class SchedulerControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); + + @Test + public void queryScheduleList() throws Exception { + MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/list","project_test1") + .header(SESSION_ID, sessionId)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + 
logger.info(mvcResult.getResponse().getContentAsString()); + } + + + @Test + public void previewSchedule() throws Exception { + MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/preview","li_test_1") + .header(SESSION_ID, sessionId) + .param("schedule","{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}")) + .andExpect(status().isCreated()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java new file mode 100644 index 0000000000..50b8c140a2 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * task instance controller test + */ +public class TaskInstanceControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(TaskInstanceControllerTest.class); + + @Test + public void queryTaskListPaging() throws Exception { + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + //paramsMap.add("processInstanceId","1380"); + paramsMap.add("searchVal",""); + paramsMap.add("taskName",""); + //paramsMap.add("stateType",""); + paramsMap.add("startDate","2019-02-26 19:48:00"); + paramsMap.add("endDate","2019-02-26 19:48:22"); + paramsMap.add("pageNo","1"); + paramsMap.add("pageSize","20"); + + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/task-instance/list-paging","project_test1") + .header(SESSION_ID, sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); +
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java new file mode 100644 index 0000000000..dd0544d9ba --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * tenant controller test + */ +public class TenantControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(TenantControllerTest.class); + + + @Test + public void queryTenantList() throws Exception { + + MvcResult mvcResult = mockMvc.perform(get("/tenant/list") + .header(SESSION_ID, sessionId)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java new file mode 100644 index 0000000000..b47dd9ec89 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.controller; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * users controller test + */ +public class UsersControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(UsersControllerTest.class); + + + @Test + public void queryList() throws Exception { + + MvcResult mvcResult = mockMvc.perform(get("/users/list") + .header(SESSION_ID, sessionId)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); +
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java new file mode 100644 index 0000000000..b726bb2d28 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class DataAnalysisServiceTest { + private static final Logger logger = LoggerFactory.getLogger(DataAnalysisServiceTest.class); + + @Autowired + private DataAnalysisService dataAnalysisService; + + @Test + public void countDefinitionByUser(){ + User loginUser = new User(); + loginUser.setId(27); + loginUser.setUserType(UserType.GENERAL_USER); + Map map = dataAnalysisService.countDefinitionByUser(loginUser, 21); + Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); + } + +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java new file mode 100644 index 0000000000..0db97a6ccc --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class DataSourceServiceTest { + private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceTest.class); + + @Autowired + private DataSourceService dataSourceService; + + @Test + public void queryDataSourceList(){ + + User loginUser = new User(); + loginUser.setId(27); + loginUser.setUserType(UserType.GENERAL_USER); + Map map = dataSourceService.queryDataSourceList(loginUser, DbType.MYSQL.ordinal()); + Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); + } +} \ No newline at end of file diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java new file mode 100644 index 0000000000..a1f4465098 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.text.MessageFormat; +import java.util.HashMap; +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class ExecutorServiceTest { + private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceTest.class); + + @Autowired + private ExecutorService executorService; + + @Ignore + @Test + public void startCheckByProcessDefinedId(){ + + Map map = executorService.startCheckByProcessDefinedId(1234); + Assert.assertNull(map); + + } + + + @Test + public void putMsgWithParamsTest() { + + Map map = new HashMap<>(5); + putMsgWithParams(map, Status.PROJECT_ALREADY_EXISTS); + + logger.info(map.toString()); + } + + + void putMsgWithParams(Map result, Status status,Object ... 
statusParams) { + result.put(Constants.STATUS, status); + if(statusParams != null && statusParams.length > 0){ + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + }else { + result.put(Constants.MSG, status.getMsg()); + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java new file mode 100644 index 0000000000..798e064d79 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class LoggerServiceTest { + private static final Logger logger = LoggerFactory.getLogger(LoggerServiceTest.class); + + @Autowired + private LoggerService loggerService; + + @Test + public void queryDataSourceList(){ + + User loginUser = new User(); + loginUser.setId(27); + loginUser.setUserType(UserType.GENERAL_USER); + + Result result = loggerService.queryLog(-1, 0, 100); + + Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java new file mode 100644 index 0000000000..c132772929 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import com.alibaba.fastjson.JSON; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class ProcessDefinitionServiceTest { + private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceTest.class); + + @Autowired + ProcessDefinitionService processDefinitionService; + + @Test + public void queryProccessDefinitionList() throws Exception { + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + Map map = processDefinitionService.queryProccessDefinitionList(loginUser,"project_test1"); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + 
logger.info(JSON.toJSONString(map)); + } + + @Test + public void queryProcessDefinitionListPagingTest() throws Exception { + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map map = processDefinitionService.queryProcessDefinitionListPaging(loginUser, "project_test1", "",1, 5,0); + + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + logger.info(JSON.toJSONString(map)); + } + + @Test + public void deleteProcessDefinitionByIdTest() throws Exception { + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map map = processDefinitionService.deleteProcessDefinitionById(loginUser, "li_sql_test", 6); + + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + logger.info(JSON.toJSONString(map)); + } + + @Test + public void batchDeleteProcessDefinitionByIds() throws Exception { + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map map = processDefinitionService.batchDeleteProcessDefinitionByIds(loginUser, "li_test_1", "2,3"); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + logger.info(JSON.toJSONString(map)); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java new file mode 100644 index 0000000000..b3f7d52a35 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import com.alibaba.fastjson.JSON; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.io.IOException; +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class ProcessInstanceServiceTest { + private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceServiceTest.class); + + @Autowired + ProcessInstanceService processInstanceService; + + @Test + public void viewVariables() { + try { + Map map = processInstanceService.viewVariables(-1); + Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); + logger.info(JSON.toJSONString(map)); + }catch (Exception 
e){ + logger.error(e.getMessage(), e); + } + } + + @Test + public void testDependResult(){ + String logString = "[INFO] 2019-03-19 17:11:08.475 cn.escheduler.server.worker.log.TaskLogger:[172] - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n" + + "[INFO] 2019-03-19 17:11:08.476 cn.escheduler.server.worker.runner.TaskScheduleThread:[172] - task : 223_10739_452334 exit status code : 0\n" + + "[root@node2 current]# "; + try { + Map resultMap = + processInstanceService.parseLogForDependentResult(logString); + Assert.assertEquals(resultMap.size() , 1); + } catch (IOException e) { + + } + } + + @Test + public void queryProcessInstanceList() throws Exception { + + User loginUser = new User(); + loginUser.setId(27); + loginUser.setUserType(UserType.GENERAL_USER); + Map map = processInstanceService.queryProcessInstanceList(loginUser, "project_test1", 0, "", "", "", ExecutionStatus.FAILURE, "", 1, 5); + + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + logger.info(JSON.toJSONString(map)); + } + + @Test + public void batchDeleteProcessInstanceByIds() throws Exception { + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map map = processInstanceService.batchDeleteProcessInstanceByIds(loginUser, "li_test_1", "4,2,300"); + + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + logger.info(JSON.toJSONString(map)); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java new file mode 100644 index 0000000000..c372db7450 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class ResourcesServiceTest { + private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceTest.class); + + @Autowired + private ResourcesService resourcesService; + + @Test + public void querytResourceList(){ + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + Map map = resourcesService.queryResourceList(loginUser, ResourceType.FILE); + 
Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java new file mode 100644 index 0000000000..6395e07297 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class SchedulerServiceTest { + private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceTest.class); + + @Autowired + private SchedulerService schedulerService; + + @Test + public void testSetScheduleState(){ + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Project project = new Project(); + project.setName("project_test1"); + project.setId(-1); + + Map map = schedulerService.setScheduleState(loginUser, project.getName(), 44, ReleaseState.ONLINE); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + } + +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java new file mode 100644 index 0000000000..1920586706 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.commons.lang3.StringUtils; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class SessionServiceTest { + + private static final Logger logger = LoggerFactory.getLogger(SessionServiceTest.class); + + @Autowired + private SessionService sessionService; + + @Test + public void createSession(){ + + User loginUser = new User(); + loginUser.setId(1); + loginUser.setUserType(UserType.GENERAL_USER); + + String session = sessionService.createSession(loginUser, "127.0.0.1"); + Assert.assertTrue(StringUtils.isNotEmpty(session)); + + } +} \ No newline at end of file diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java new file mode 100644 index 0000000000..a0c6d1e711 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class TaskInstanceServiceTest { + private static final Logger logger = LoggerFactory.getLogger(TaskInstanceServiceTest.class); + + @Autowired + private TaskInstanceService taskInstanceService; + + @Test + public void queryTaskListPaging(){ + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + Map map = taskInstanceService.queryTaskListPaging(loginUser, "project_test1", 0, "", + "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + PageInfo pageInfo = (PageInfo) map.get("data"); + + if(pageInfo != null){ + logger.info(pageInfo.getLists().toString()); + } + + + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java new file mode 100644 index 0000000000..12e5a1d093 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java @@ -0,0 +1,53 @@ +/* + * 
Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class TenantServiceTest { + private static final Logger logger = LoggerFactory.getLogger(TenantServiceTest.class); + + @Autowired + private TenantService tenantService; + + @Test + public void queryTenantList(){ + + User loginUser = new User(); + loginUser.setUserType(UserType.ADMIN_USER); + Map map = tenantService.queryTenantList(loginUser); + Assert.assertEquals(Status.SUCCESS, 
map.get(Constants.STATUS)); + logger.info(map.toString()); + + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java new file mode 100644 index 0000000000..c4339cdd41 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class UdfFuncServiceTest { + private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceTest.class); + + @Autowired + private UdfFuncService udfFuncService; + + @Test + public void queryUdfFuncListPaging(){ + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + Map map = udfFuncService.queryUdfFuncListPaging(loginUser, "", 1, 10); + Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); + + PageInfo pageInfo = (PageInfo) map.get("data"); + logger.info(pageInfo.getLists().toString()); + + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java new file mode 100644 index 0000000000..184617d392 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Map; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class UsersServiceTest { + private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class); + + @Autowired + private UsersService usersService; + + @Test + public void getUserInfo(){ + + User loginUser = new User(); + loginUser.setId(19); + loginUser.setUserType(UserType.GENERAL_USER); + Map map = usersService.getUserInfo(loginUser); + Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); + logger.info(map.toString()); + + } +} \ No newline at end of file diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitorUtilsTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitorUtilsTest.java new file mode 100644 index 0000000000..f6facce97d --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitorUtilsTest.java @@ -0,0 +1,30 @@ +package org.apache.dolphinscheduler.api.utils; + +import org.apache.dolphinscheduler.common.model.MasterServer; +import org.junit.Assert; +import org.junit.Test; +import java.util.List; + +/** + * zookeeper monitor utils test + */ +public class ZookeeperMonitorUtilsTest { + + + @Test + public void testGetMasterLsit(){ + + ZookeeperMonitor zookeeperMonitor = new ZookeeperMonitor(); + + + List masterServerList = zookeeperMonitor.getMasterServers(); + + List workerServerList = zookeeperMonitor.getWorkerServers(); + + Assert.assertTrue(masterServerList.size() >= 0); + Assert.assertTrue(workerServerList.size() >= 0); + + + } + +} \ No newline at end of file diff --git a/escheduler-api/src/test/resources/dao/data_source.properties b/dolphinscheduler-api/src/test/resources/dao/data_source.properties similarity index 100% rename from escheduler-api/src/test/resources/dao/data_source.properties rename to dolphinscheduler-api/src/test/resources/dao/data_source.properties diff --git a/dolphinscheduler-common/pom.xml b/dolphinscheduler-common/pom.xml new file mode 100644 index 0000000000..013126a282 --- /dev/null +++ b/dolphinscheduler-common/pom.xml @@ -0,0 +1,595 @@ + + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler + 1.1.0-SNAPSHOT + + dolphinscheduler-common + dolphinscheduler-common + http://maven.apache.org + jar + + UTF-8 + + + + com.alibaba + fastjson + compile + + + + org.apache.httpcomponents + httpclient + + + + junit + junit + test + + + + commons-configuration + commons-configuration + + + com.fasterxml.jackson.core + jackson-annotations + + + 
com.fasterxml.jackson.core + jackson-databind + + + org.apache.curator + curator-client + 2.12.0 + + + log4j + log4j + + + io.netty + netty + + + + + org.apache.commons + commons-collections4 + + + + org.apache.hadoop + hadoop-common + + + org.slf4j + slf4j-log4j12 + + + jdk.tools + jdk.tools + + + servlet-api + javax.servlet + + + javax.servlet + servlet-api + + + log4j + log4j + + + org.apache.curator + curator-client + + + + commons-configuration + commons-configuration + + + io.grpc + grpc-protobuf + + + io.netty + netty + + + org.codehaus.jackson + jackson-core-asl + + + org.codehaus.jackson + jackson-mapper-asl + + + com.google.protobuf + jackson-mapper-asl + + + com.google.code.gson + gson + + + org.apache.commons + commons-math3 + + + xmlenc + xmlenc + + + commons-net + commons-net + + + org.apache.avro + avro + + + org.apache.zookeeper + zookeeper + + + jsr305 + com.google.code.findbugs + + + javax.servlet.jsp + jsp-api + + + jersey-json + com.sun.jersey + + + jersey-server + com.sun.jersey + + + jersey-core + com.sun.jersey + + + xz + org.tukaani + + + + + + + + + + org.apache.hadoop + hadoop-client + + + org.slf4j + slf4j-log4j12 + + + servlet-api + javax.servlet + + + org.codehaus.jackson + jackson-jaxrs + + + org.codehaus.jackson + jackson-xc + + + + + + + org.fusesource.leveldbjni + leveldbjni-all + + + org.apache.zookeeper + zookeeper + + + org.apache.hadoop + hadoop-mapreduce-client-shuffle + + + jersey-client + com.sun.jersey + + + jersey-core + com.sun.jersey + + + jaxb-api + javax.xml.bind + + + log4j + log4j + + + + + + javax.servlet + javax.servlet-api + + + + org.apache.hadoop + hadoop-hdfs + + + javax.servlet + servlet-api + + + io.netty + netty + + + com.google.protobuf + protobuf-java + + + xmlenc + xmlenc + + + io.netty + netty-all + + + org.fusesource.leveldbjni + leveldbjni-all + + + jersey-core + com.sun.jersey + + + jersey-server + com.sun.jersey + + + log4j + log4j + + + + + + org.apache.hadoop + hadoop-aws + + + org.apache.hadoop + 
hadoop-common + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.core + jackson-annotations + + + + + + org.apache.commons + commons-lang3 + + + + org.postgresql + postgresql + + + + org.apache.hive + hive-jdbc + + + slf4j-log4j12 + org.slf4j + + + org.eclipse.jetty.aggregate + jetty-all + + + + org.apache.ant + ant + + + io.dropwizard.metrics + metrics-json + + + io.dropwizard.metrics + metrics-jvm + + + com.github.joshelser + dropwizard-metrics-hadoop-metrics2-reporter + + + + io.netty + netty-all + + + com.google.code.gson + gson + + + com.google.code.findbugs + jsr305 + + + io.dropwizard.metrics + metrics-core + + + javax.servlet + servlet-api + + + org.apache.avro + avro + + + org.apache.commons + commons-compress + + + org.apache.curator + curator-client + + + org.apache.hadoop + hadoop-auth + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.apache.hadoop + hadoop-yarn-api + + + + org.apache.zookeeper + zookeeper + + + org.codehaus.jackson + jackson-jaxrs + + + org.codehaus.jackson + jackson-xc + + + com.google.protobuf + protobuf-java + + + + org.json + json + + + log4j-slf4j-impl + org.apache.logging.log4j + + + javax.servlet + org.eclipse.jetty.orbit + + + servlet-api-2.5 + org.mortbay.jetty + + + jasper-runtime + tomcat + + + slider-core + org.apache.slider + + + hbase-server + org.apache.hbase + + + jersey-client + com.sun.jersey + + + jersey-core + com.sun.jersey + + + jersey-json + com.sun.jersey + + + jersey-server + com.sun.jersey + + + jersey-guice + com.sun.jersey.contribs + + + hbase-common + org.apache.hbase + + + hbase-hadoop2-compat + org.apache.hbase + + + hbase-client + org.apache.hbase + + + hbase-hadoop-compat + org.apache.hbase + + + tephra-hbase-compat-1.0 + co.cask.tephra + + + jaxb-api + javax.xml.bind + + + hive-llap-client + org.apache.hive + + + hive-llap-common + org.apache.hive + + + hive-llap-server + org.apache.hive + + + tephra-core + 
co.cask.tephra + + + ant + ant + + + stringtemplate + org.antlr + + + antlr-runtime + org.antlr + + + hive-shims + org.apache.hive + + + jsp-api + javax.servlet + + + log4j-api + org.apache.logging.log4j + + + log4j-core + org.apache.logging.log4j + + + log4j-web + org.apache.logging.log4j + + + + + + + ch.qos.logback + logback-classic + + + ch.qos.logback + logback-core + + + com.github.oshi + oshi-core + + + + ru.yandex.clickhouse + clickhouse-jdbc + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + jaxb-api + javax.xml.bind + + + + + + com.microsoft.sqlserver + mssql-jdbc + + + azure-keyvault + com.microsoft.azure + + + + + + org.mortbay.jetty + jsp-2.1 + + + org.mortbay.jetty + servlet-api-2.5 + + + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${java.version} + ${java.version} + ${project.build.sourceEncoding} + + + + + diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java new file mode 100644 index 0000000000..9ca923f3b2 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java @@ -0,0 +1,923 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common; + +import org.apache.dolphinscheduler.common.utils.OSUtils; + +import java.util.regex.Pattern; + +/** + * Constants + */ +public final class Constants { + + /** + * zookeeper properties path + */ + public static final String ZOOKEEPER_PROPERTIES_PATH = "zookeeper.properties"; + + /** + * worker properties path + */ + public static final String WORKER_PROPERTIES_PATH = "worker.properties"; + + /** + * master properties path + */ + public static final String MASTER_PROPERTIES_PATH = "master.properties"; + + /** + * hadoop properties path + */ + public static final String HADOOP_PROPERTIES_PATH = "/common/hadoop/hadoop.properties"; + + /** + * common properties path + */ + public static final String COMMON_PROPERTIES_PATH = "/common/common.properties"; + + /** + * dao properties path + */ + public static final String DAO_PROPERTIES_PATH = "/dao/data_source.properties"; + + /** + * fs.defaultFS + */ + public static final String FS_DEFAULTFS = "fs.defaultFS"; + + + /** + * fs s3a endpoint + */ + public static final String FS_S3A_ENDPOINT = "fs.s3a.endpoint"; + + /** + * fs s3a access key + */ + public static final String FS_S3A_ACCESS_KEY = "fs.s3a.access.key"; + + /** + * fs s3a secret key + */ + public static final String FS_S3A_SECRET_KEY = "fs.s3a.secret.key"; + + + /** + * yarn.resourcemanager.ha.rm.idsfs.defaultFS + */ + public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids"; + + /** + * yarn.application.status.address + */ + public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address"; + + /** + * hdfs configuration + * hdfs.root.user + */ + public static final String HDFS_ROOT_USER = "hdfs.root.user"; + + /** + * hdfs configuration + * data.store2hdfs.basepath + */ + public static final String DATA_STORE_2_HDFS_BASEPATH = 
"data.store2hdfs.basepath"; + + /** + * data.basedir.path + */ + public static final String DATA_BASEDIR_PATH = "data.basedir.path"; + + /** + * data.download.basedir.path + */ + public static final String DATA_DOWNLOAD_BASEDIR_PATH = "data.download.basedir.path"; + + /** + * process.exec.basepath + */ + public static final String PROCESS_EXEC_BASEPATH = "process.exec.basepath"; + + /** + * escheduler.env.path + */ + public static final String ESCHEDULER_ENV_PATH = "escheduler.env.path"; + + + /** + * python home + */ + public static final String PYTHON_HOME="PYTHON_HOME"; + + /** + * resource.view.suffixs + */ + public static final String RESOURCE_VIEW_SUFFIXS = "resource.view.suffixs"; + + /** + * development.state + */ + public static final String DEVELOPMENT_STATE = "development.state"; + + /** + * res.upload.startup.type + */ + public static final String RES_UPLOAD_STARTUP_TYPE = "res.upload.startup.type"; + + /** + * zookeeper quorum + */ + public static final String ZOOKEEPER_QUORUM = "zookeeper.quorum"; + + /** + * MasterServer directory registered in zookeeper + */ + public static final String ZOOKEEPER_ESCHEDULER_MASTERS = "zookeeper.escheduler.masters"; + + /** + * WorkerServer directory registered in zookeeper + */ + public static final String ZOOKEEPER_ESCHEDULER_WORKERS = "zookeeper.escheduler.workers"; + + /** + * all servers directory registered in zookeeper + */ + public static final String ZOOKEEPER_ESCHEDULER_DEAD_SERVERS = "zookeeper.escheduler.dead.servers"; + + /** + * MasterServer lock directory registered in zookeeper + */ + public static final String ZOOKEEPER_ESCHEDULER_LOCK_MASTERS = "zookeeper.escheduler.lock.masters"; + + /** + * WorkerServer lock directory registered in zookeeper + */ + public static final String ZOOKEEPER_ESCHEDULER_LOCK_WORKERS = "zookeeper.escheduler.lock.workers"; + + /** + * MasterServer failover directory registered in zookeeper + */ + public static final String ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_MASTERS = 
"zookeeper.escheduler.lock.failover.masters"; + + /** + * WorkerServer failover directory registered in zookeeper + */ + public static final String ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_WORKERS = "zookeeper.escheduler.lock.failover.workers"; + + /** + * MasterServer startup failover runing and fault tolerance process + */ + public static final String ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "zookeeper.escheduler.lock.failover.startup.masters"; + + /** + * need send warn times when master server or worker server failover + */ + public static final int ESCHEDULER_WARN_TIMES_FAILOVER = 3; + + /** + * comma , + */ + public static final String COMMA = ","; + + /** + * COLON : + */ + public static final String COLON = ":"; + + /** + * SINGLE_SLASH / + */ + public static final String SINGLE_SLASH = "/"; + + /** + * DOUBLE_SLASH // + */ + public static final String DOUBLE_SLASH = "//"; + + /** + * SEMICOLON ; + */ + public static final String SEMICOLON = ";"; + + /** + * EQUAL SIGN + */ + public static final String EQUAL_SIGN = "="; + + /** + * ZOOKEEPER_SESSION_TIMEOUT + */ + public static final String ZOOKEEPER_SESSION_TIMEOUT = "zookeeper.session.timeout"; + + public static final String ZOOKEEPER_CONNECTION_TIMEOUT = "zookeeper.connection.timeout"; + + public static final String ZOOKEEPER_RETRY_SLEEP = "zookeeper.retry.sleep"; + + public static final String ZOOKEEPER_RETRY_MAXTIME = "zookeeper.retry.maxtime"; + + + public static final String MASTER_HEARTBEAT_INTERVAL = "master.heartbeat.interval"; + + public static final String MASTER_EXEC_THREADS = "master.exec.threads"; + + public static final String MASTER_EXEC_TASK_THREADS = "master.exec.task.number"; + + + public static final String MASTER_COMMIT_RETRY_TIMES = "master.task.commit.retryTimes"; + + public static final String MASTER_COMMIT_RETRY_INTERVAL = "master.task.commit.interval"; + + + public static final String WORKER_EXEC_THREADS = "worker.exec.threads"; + + public static final String 
WORKER_HEARTBEAT_INTERVAL = "worker.heartbeat.interval"; + + public static final String WORKER_FETCH_TASK_NUM = "worker.fetch.task.num"; + + public static final String WORKER_MAX_CPULOAD_AVG = "worker.max.cpuload.avg"; + + public static final String WORKER_RESERVED_MEMORY = "worker.reserved.memory"; + + public static final String MASTER_MAX_CPULOAD_AVG = "master.max.cpuload.avg"; + + public static final String MASTER_RESERVED_MEMORY = "master.reserved.memory"; + + + /** + * escheduler tasks queue + */ + public static final String SCHEDULER_TASKS_QUEUE = "tasks_queue"; + + /** + * escheduler need kill tasks queue + */ + public static final String SCHEDULER_TASKS_KILL = "tasks_kill"; + + public static final String ZOOKEEPER_SCHEDULER_ROOT = "zookeeper.escheduler.root"; + + public static final String SCHEDULER_QUEUE_IMPL = "escheduler.queue.impl"; + + + /** + * date format of yyyy-MM-dd HH:mm:ss + */ + public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss"; + + + /** + * date format of yyyyMMddHHmmss + */ + public static final String YYYYMMDDHHMMSS = "yyyyMMddHHmmss"; + + /** + * http connect time out + */ + public static final int HTTP_CONNECT_TIMEOUT = 60 * 1000; + + + /** + * http connect request time out + */ + public static final int HTTP_CONNECTION_REQUEST_TIMEOUT = 60 * 1000; + + /** + * httpclient soceket time out + */ + public static final int SOCKET_TIMEOUT = 60 * 1000; + + /** + * http header + */ + public static final String HTTP_HEADER_UNKNOWN = "unKnown"; + + /** + * http X-Forwarded-For + */ + public static final String HTTP_X_FORWARDED_FOR = "X-Forwarded-For"; + + /** + * http X-Real-IP + */ + public static final String HTTP_X_REAL_IP = "X-Real-IP"; + + /** + * UTF-8 + */ + public static final String UTF_8 = "UTF-8"; + + /** + * user name regex + */ + public static final Pattern REGEX_USER_NAME = Pattern.compile("[a-zA-Z0-9]{3,20}"); + + /** + * email regex + */ + public static final Pattern REGEX_MAIL_NAME = 
Pattern.compile("^([a-z0-9A-Z]+[-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}$"); + + /** + * read permission + */ + public static final int READ_PERMISSION = 2 * 1; + + + /** + * write permission + */ + public static final int WRITE_PERMISSION = 2 * 2; + + + /** + * execute permission + */ + public static final int EXECUTE_PERMISSION = 1; + + /** + * default admin permission + */ + public static final int DEFAULT_ADMIN_PERMISSION = 7; + + + /** + * all permissions + */ + public static final int ALL_PERMISSIONS = READ_PERMISSION | WRITE_PERMISSION | EXECUTE_PERMISSION; + + /** + * max task timeout + */ + public static final int MAX_TASK_TIMEOUT = 24 * 3600; + + + /** + * heartbeat threads number + */ + public static final int defaulWorkerHeartbeatThreadNum = 5; + + /** + * heartbeat interval + */ + public static final int defaultWorkerHeartbeatInterval = 60; + + /** + * worker fetch task number + */ + public static final int defaultWorkerFetchTaskNum = 1; + + /** + * worker execute threads number + */ + public static final int defaultWorkerExecThreadNum = 10; + + /** + * master cpu load + */ + public static final int defaultMasterCpuLoad = Runtime.getRuntime().availableProcessors() * 2; + + /** + * master reserved memory + */ + public static final double defaultMasterReservedMemory = OSUtils.totalMemorySize() / 10; + + /** + * worker cpu load + */ + public static final int defaultWorkerCpuLoad = Runtime.getRuntime().availableProcessors() * 2; + + /** + * worker reserved memory + */ + public static final double defaultWorkerReservedMemory = OSUtils.totalMemorySize() / 10; + + + /** + * master execute threads number + */ + public static final int defaultMasterExecThreadNum = 100; + + + /** + * default master concurrent task execute num + */ + public static final int defaultMasterTaskExecNum = 20; + + /** + * default log cache rows num,output when reach the number + */ + public static final int defaultLogRowsNum = 4 * 16; + + /** + * log flush 
interval,output when reach the interval + */ + public static final int defaultLogFlushInterval = 1000; + + + /** + * default master heartbeat thread number + */ + public static final int defaulMasterHeartbeatThreadNum = 5; + + + /** + * default master heartbeat interval + */ + public static final int defaultMasterHeartbeatInterval = 60; + + /** + * default master commit retry times + */ + public static final int defaultMasterCommitRetryTimes = 5; + + + /** + * default master commit retry interval + */ + public static final int defaultMasterCommitRetryInterval = 100; + + /** + * time unit secong to minutes + */ + public static final int SEC_2_MINUTES_TIME_UNIT = 60; + + + /*** + * + * rpc port + */ + public static final int RPC_PORT = 50051; + + /** + * forbid running task + */ + public static final String FLOWNODE_RUN_FLAG_FORBIDDEN = "FORBIDDEN"; + + /** + * task record configuration path + */ + public static final String DATA_SOURCE_PROPERTIES = "dao/data_source.properties"; + + public static final String TASK_RECORD_URL = "task.record.datasource.url"; + + public static final String TASK_RECORD_FLAG = "task.record.flag"; + + public static final String TASK_RECORD_USER = "task.record.datasource.username"; + + public static final String TASK_RECORD_PWD = "task.record.datasource.password"; + + public static final String DEFAULT = "Default"; + public static final String USER = "user"; + public static final String PASSWORD = "password"; + public static final String XXXXXX = "******"; + + public static String TASK_RECORD_TABLE_HIVE_LOG = "eamp_hive_log_hd"; + + public static String TASK_RECORD_TABLE_HISTORY_HIVE_LOG = "eamp_hive_hist_log_hd"; + + public static final String STATUS = "status"; + + + + /** + * command parameter keys + */ + public static final String CMDPARAM_RECOVER_PROCESS_ID_STRING = "ProcessInstanceId"; + + public static final String CMDPARAM_RECOVERY_START_NODE_STRING = "StartNodeIdList"; + + public static final String 
CMDPARAM_RECOVERY_WAITTING_THREAD = "WaittingThreadInstanceId"; + + public static final String CMDPARAM_SUB_PROCESS = "processInstanceId"; + + public static final String CMDPARAM_EMPTY_SUB_PROCESS = "0"; + + public static final String CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID = "parentProcessInstanceId"; + + public static final String CMDPARAM_SUB_PROCESS_DEFINE_ID = "processDefinitionId"; + + public static final String CMDPARAM_START_NODE_NAMES = "StartNodeNameList"; + + /** + * complement data start date + */ + public static final String CMDPARAM_COMPLEMENT_DATA_START_DATE = "complementStartDate"; + + /** + * complement data end date + */ + public static final String CMDPARAM_COMPLEMENT_DATA_END_DATE = "complementEndDate"; + + /** + * hadoop configuration + */ + public static final String HADOOP_RM_STATE_ACTIVE = "ACTIVE"; + + public static final String HADOOP_RM_STATE_STANDBY = "STANDBY"; + + public static final String HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT = "resource.manager.httpaddress.port"; + + + /** + * data source config + */ + + public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name"; + + public static final String SPRING_DATASOURCE_URL = "spring.datasource.url"; + + public static final String SPRING_DATASOURCE_USERNAME = "spring.datasource.username"; + + public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password"; + + public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout"; + + public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize"; + + public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle"; + + public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive"; + + public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait"; + + public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = 
"spring.datasource.timeBetweenEvictionRunsMillis"; + + public static final String SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS = "spring.datasource.timeBetweenConnectErrorMillis"; + + public static final String SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis"; + + public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery"; + + public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle"; + + public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow"; + + public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn"; + + public static final String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements"; + + public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit"; + + public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive"; + + public static final String SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize"; + + public static final String DEVELOPMENT = "development"; + + public static final String QUARTZ_PROPERTIES_PATH = "quartz.properties"; + + /** + * sleep time + */ + public static final int SLEEP_TIME_MILLIS = 1000; + + /** + * heartbeat for zk info length + */ + public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 6; + + + /** + * hadoop params constant + */ + /** + * jar + */ + public static final String JAR = "jar"; + + /** + * hadoop + */ + public static final String HADOOP = "hadoop"; + + /** + * -D parameter + */ + public static final String D = "-D"; + + /** + * -D mapreduce.job.queuename=ququename + */ + public static final String MR_QUEUE = "mapreduce.job.queuename"; + + + /** + * jdbc class name + */ + /** + * mysql + */ + public static final String 
JDBC_MYSQL_CLASS_NAME = "com.mysql.jdbc.Driver"; + + /** + * postgresql + */ + public static final String JDBC_POSTGRESQL_CLASS_NAME = "org.postgresql.Driver"; + + /** + * hive + */ + public static final String JDBC_HIVE_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver"; + + /** + * spark + */ + public static final String JDBC_SPARK_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver"; + + /** + * ClickHouse + */ + public static final String JDBC_CLICKHOUSE_CLASS_NAME = "ru.yandex.clickhouse.ClickHouseDriver"; + + /** + * Oracle + */ + public static final String JDBC_ORACLE_CLASS_NAME = "oracle.jdbc.driver.OracleDriver"; + + /** + * Oracle + */ + public static final String JDBC_SQLSERVER_CLASS_NAME = "com.microsoft.sqlserver.jdbc.SQLServerDriver"; + + /** + * spark params constant + */ + public static final String MASTER = "--master"; + + public static final String DEPLOY_MODE = "--deploy-mode"; + + /** + * --class CLASS_NAME + */ + public static final String CLASS = "--class"; + + /** + * --driver-cores NUM + */ + public static final String DRIVER_CORES = "--driver-cores"; + + /** + * --driver-memory MEM + */ + public static final String DRIVER_MEMORY = "--driver-memory"; + + /** + * --num-executors NUM + */ + public static final String NUM_EXECUTORS = "--num-executors"; + + /** + * --executor-cores NUM + */ + public static final String EXECUTOR_CORES = "--executor-cores"; + + /** + * --executor-memory MEM + */ + public static final String EXECUTOR_MEMORY = "--executor-memory"; + + + /** + * --queue QUEUE + */ + public static final String SPARK_QUEUE = "--queue"; + + + /** + * exit code success + */ + public static final int EXIT_CODE_SUCCESS = 0; + + /** + * exit code kill + */ + public static final int EXIT_CODE_KILL = 137; + + /** + * exit code failure + */ + public static final int EXIT_CODE_FAILURE = -1; + + /** + * date format of yyyyMMdd + */ + public static final String PARAMETER_FORMAT_DATE = "yyyyMMdd"; + + /** + * date format of yyyyMMddHHmmss + */ + public 
static final String PARAMETER_FORMAT_TIME = "yyyyMMddHHmmss"; + + /** + * system date(yyyyMMddHHmmss) + */ + public static final String PARAMETER_DATETIME = "system.datetime"; + + /** + * system date(yyyymmdd) today + */ + public static final String PARAMETER_CURRENT_DATE = "system.biz.curdate"; + + /** + * system date(yyyymmdd) yesterday + */ + public static final String PARAMETER_BUSINESS_DATE = "system.biz.date"; + + /** + * ACCEPTED + */ + public static final String ACCEPTED = "ACCEPTED"; + + /** + * SUCCEEDED + */ + public static final String SUCCEEDED = "SUCCEEDED"; + /** + * NEW + */ + public static final String NEW = "NEW"; + /** + * NEW_SAVING + */ + public static final String NEW_SAVING = "NEW_SAVING"; + /** + * SUBMITTED + */ + public static final String SUBMITTED = "SUBMITTED"; + /** + * FAILED + */ + public static final String FAILED = "FAILED"; + /** + * KILLED + */ + public static final String KILLED = "KILLED"; + /** + * RUNNING + */ + public static final String RUNNING = "RUNNING"; + /** + * underline "_" + */ + public static final String UNDERLINE = "_"; + /** + * quartz job prifix + */ + public static final String QUARTZ_JOB_PRIFIX = "job"; + /** + * quartz job group prifix + */ + public static final String QUARTZ_JOB_GROUP_PRIFIX = "jobgroup"; + /** + * projectId + */ + public static final String PROJECT_ID = "projectId"; + /** + * processId + */ + public static final String SCHEDULE_ID = "scheduleId"; + /** + * schedule + */ + public static final String SCHEDULE = "schedule"; + /** + * application regex + */ + public static final String APPLICATION_REGEX = "application_\\d+_\\d+"; + public static final String PID = "pid"; + /** + * month_begin + */ + public static final String MONTH_BEGIN = "month_begin"; + /** + * add_months + */ + public static final String ADD_MONTHS = "add_months"; + /** + * month_end + */ + public static final String MONTH_END = "month_end"; + /** + * week_begin + */ + public static final String WEEK_BEGIN = "week_begin"; 
+ /** + * week_end + */ + public static final String WEEK_END = "week_end"; + /** + * timestamp + */ + public static final String TIMESTAMP = "timestamp"; + public static final char SUBTRACT_CHAR = '-'; + public static final char ADD_CHAR = '+'; + public static final char MULTIPLY_CHAR = '*'; + public static final char DIVISION_CHAR = '/'; + public static final char LEFT_BRACE_CHAR = '('; + public static final char RIGHT_BRACE_CHAR = ')'; + public static final String ADD_STRING = "+"; + public static final String MULTIPLY_STRING = "*"; + public static final String DIVISION_STRING = "/"; + public static final String LEFT_BRACE_STRING = "("; + public static final char P = 'P'; + public static final char N = 'N'; + public static final String SUBTRACT_STRING = "-"; + public static final String GLOBAL_PARAMS = "globalParams"; + public static final String LOCAL_PARAMS = "localParams"; + public static final String PROCESS_INSTANCE_STATE = "processInstanceState"; + public static final String TASK_LIST = "taskList"; + public static final String RWXR_XR_X = "rwxr-xr-x"; + + /** + * master/worker server use for zk + */ + public static final String MASTER_PREFIX = "master"; + public static final String WORKER_PREFIX = "worker"; + public static final String DELETE_ZK_OP = "delete"; + public static final String ADD_ZK_OP = "add"; + public static final String ALIAS = "alias"; + public static final String CONTENT = "content"; + public static final String DEPENDENT_SPLIT = ":||"; + public static final String DEPENDENT_ALL = "ALL"; + + + /** + * preview schedule execute count + */ + public static final int PREVIEW_SCHEDULE_EXECUTE_COUNT = 5; + + /** + * kerberos + */ + public static final String KERBEROS = "kerberos"; + + /** + * java.security.krb5.conf + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java.security.krb5.conf.path + */ + public static final String JAVA_SECURITY_KRB5_CONF_PATH = "java.security.krb5.conf.path"; + + /** 
+ * hadoop.security.authentication + */ + public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication"; + + /** + * hadoop.security.authentication + */ + public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state"; + + + /** + * loginUserFromKeytab user + */ + public static final String LOGIN_USER_KEY_TAB_USERNAME = "login.user.keytab.username"; + + /** + * default worker group id + */ + public static final int DEFAULT_WORKER_ID = -1; + + /** + * loginUserFromKeytab path + */ + public static final String LOGIN_USER_KEY_TAB_PATH = "login.user.keytab.path"; + + + /** + * hive conf + */ + public static final String HIVE_CONF = "hiveconf:"; + + //flink 任务 + public static final String FLINK_YARN_CLUSTER = "yarn-cluster"; + public static final String FLINK_RUN_MODE = "-m"; + public static final String FLINK_YARN_SLOT = "-ys"; + public static final String FLINK_APP_NAME = "-ynm"; + public static final String FLINK_TASK_MANAGE = "-yn"; + + public static final String FLINK_JOB_MANAGE_MEM = "-yjm"; + public static final String FLINK_TASK_MANAGE_MEM = "-ytm"; + public static final String FLINK_detach = "-d"; + public static final String FLINK_MAIN_CLASS = "-c"; + + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/IStoppable.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/IStoppable.java new file mode 100644 index 0000000000..b0ffe4ac6d --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/IStoppable.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common; + +/** + * server stop interface. + */ +public interface IStoppable { + /** + * Stop this service. + * @param cause why stopping + */ + public void stop(String cause); + +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertStatus.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertStatus.java new file mode 100644 index 0000000000..4c371c88e0 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertStatus.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.enums; + +/** + * alert status + */ +public enum AlertStatus { + /** + * 0 waiting executed; 1 execute successfully,2 execute failed + */ + WAIT_EXECUTION,EXECUTION_SUCCESS,EXECUTION_FAILURE +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertType.java new file mode 100644 index 0000000000..8260e6adb0 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertType.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.enums; + +/** + * warning message notification method + */ +public enum AlertType { + /** + * 0 email; 1 SMS + */ + EMAIL,SMS +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java new file mode 100644 index 0000000000..7938e65f95 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Types of commands that drive a workflow instance.
 * Ordinals are persisted; never reorder these constants.
 */
public enum CommandType {

    /** 0: start a new process */
    START_PROCESS,

    /** 1: start a new process from current nodes */
    START_CURRENT_TASK_PROCESS,

    /** 2: recover a tolerance-fault workflow */
    RECOVER_TOLERANCE_FAULT_PROCESS,

    /** 3: start process from paused task nodes */
    RECOVER_SUSPENDED_PROCESS,

    /** 4: start process from failed task nodes */
    START_FAILURE_TASK_PROCESS,

    /** 5: complement data */
    COMPLEMENT_DATA,

    /** 6: start a new process from the scheduler */
    SCHEDULER,

    /** 7: repeat-run a workflow */
    REPEAT_RUNNING,

    /** 8: pause a process */
    PAUSE,

    /** 9: stop a process */
    STOP,

    /** 10: recover a waiting thread (historical misspelling "WAITTING" kept for compatibility) */
    RECOVER_WAITTING_THREAD;
}
/**
 * Scheduling cycle granularities.
 */
public enum CycleEnum {

    /** 0: minute */
    MINUTE,

    /** 1: hour */
    HOUR,

    /** 2: day */
    DAY,

    /** 3: week */
    WEEK,

    /** 4: month */
    MONTH,

    /** 5: year */
    YEAR
}
/**
 * Data types available for user-defined parameters.
 */
public enum DataType {

    /** 0: string */
    VARCHAR,

    /** 1: integer */
    INTEGER,

    /** 2: long */
    LONG,

    /** 3: float */
    FLOAT,

    /** 4: double */
    DOUBLE,

    /** 5: date, "YYYY-MM-DD" */
    DATE,

    /** 6: time, "HH:MM:SS" */
    TIME,

    /** 7: timestamp */
    TIMESTAMP,

    /** 8: boolean */
    BOOLEAN
}
/**
 * Supported database types.
 */
public enum DbType {

    /** 0: mysql */
    MYSQL,

    /** 1: postgresql */
    POSTGRESQL,

    /** 2: hive */
    HIVE,

    /** 3: spark */
    SPARK,

    /** 4: clickhouse */
    CLICKHOUSE,

    /** 5: oracle */
    ORACLE,

    /** 6: sqlserver */
    SQLSERVER
}
/**
 * Evaluation result of a dependency check.
 */
public enum DependResult {

    /** 0: dependency satisfied */
    SUCCESS,

    /** 1: still waiting for the dependency */
    WAITING,

    /** 2: dependency failed */
    FAILED
}
/**
 * Strategy for combining the states of depended-on nodes.
 */
public enum DependStrategy {

    /** 0: none */
    NONE,

    /** 1: all succeeded */
    ALL_SUCCESS,

    /** 2: all failed */
    ALL_FAILED,

    /** 3: at least one succeeded */
    ONE_SUCCESS,

    /** 4: at least one failed */
    ONE_FAILED
}
/**
 * Logical relation between dependent items: AND / OR.
 */
public enum DependentRelation {

    /** all items must hold */
    AND,

    /** any item may hold */
    OR
}
/**
 * Direction of a stored-procedure parameter.
 */
public enum Direct {

    /** 0: input parameter */
    IN,

    /** 1: output parameter */
    OUT
}
/**
 * Running status shared by workflow instances and task nodes.
 * Ordinals are persisted; never reorder these constants.
 */
public enum ExecutionStatus {

    /** 0: submit success */
    SUBMITTED_SUCCESS,
    /** 1: running (historical misspelling "EXEUTION" kept for compatibility) */
    RUNNING_EXEUTION,
    /** 2: ready to pause */
    READY_PAUSE,
    /** 3: paused */
    PAUSE,
    /** 4: ready to stop */
    READY_STOP,
    /** 5: stopped */
    STOP,
    /** 6: failure */
    FAILURE,
    /** 7: success */
    SUCCESS,
    /** 8: needs fault tolerance */
    NEED_FAULT_TOLERANCE,
    /** 9: killed */
    KILL,
    /** 10: waiting thread (historical misspelling "WAITTING" kept for compatibility) */
    WAITTING_THREAD,
    /** 11: waiting for a depended-on node to complete */
    WAITTING_DEPEND;

    /**
     * @return true when this status is success
     */
    public boolean typeIsSuccess() {
        return this == SUCCESS;
    }

    /**
     * @return true when this status counts as failure (plain failure or fault tolerance needed)
     */
    public boolean typeIsFailure() {
        return this == FAILURE || this == NEED_FAULT_TOLERANCE;
    }

    /**
     * @return true when this status is terminal: success, failure, cancel, pause or waiting thread
     */
    public boolean typeIsFinished() {
        switch (this) {
            case SUCCESS:
            case FAILURE:
            case NEED_FAULT_TOLERANCE:
            case KILL:
            case STOP:
            case PAUSE:
            case WAITTING_THREAD:
                return true;
            default:
                return false;
        }
    }

    /**
     * @return true when this status is waiting thread
     */
    public boolean typeIsWaittingThread() {
        return this == WAITTING_THREAD;
    }

    /**
     * @return true when this status is pause
     */
    public boolean typeIsPause() {
        return this == PAUSE;
    }

    /**
     * @return true when the instance is still running (executing or waiting on a dependency)
     */
    public boolean typeIsRunning() {
        return this == RUNNING_EXEUTION || this == WAITTING_DEPEND;
    }

    /**
     * @return true when the instance was cancelled (killed or stopped)
     */
    public boolean typeIsCancel() {
        return this == KILL || this == STOP;
    }
}
/**
 * Policy applied to a running process when some task node fails.
 */
public enum FailureStrategy {

    /** 0: end the process when some tasks failed */
    END,

    /** 1: continue running when some tasks failed */
    CONTINUE;
}
/**
 * Generic boolean flag stored as 0/1, used for fields such as
 * have_script, have_file, can_retry, have_arr_variables,
 * have_map_variables and have_alert.
 */
public enum Flag {

    /** 0: no */
    NO,

    /** 1: yes */
    YES
}
/**
 * Conditions used to validate an HTTP task response.
 */
public enum HttpCheckCondition {

    /** 0: status code equals the default (200) */
    STATUS_CODE_DEFAULT,

    /** 1: status code equals a custom value */
    STATUS_CODE_CUSTOM,

    /** 2: response body contains a given string */
    BODY_CONTAINS,

    /** 3: response body does not contain a given string */
    BODY_NOT_CONTAINS
}
/**
 * HTTP request methods supported by the HTTP task.
 */
public enum HttpMethod {

    /** 0: GET */
    GET,

    /** 1: POST */
    POST,

    /** 2: HEAD */
    HEAD,

    /** 3: PUT */
    PUT,

    /** 4: DELETE */
    DELETE
}
/**
 * Where an HTTP task parameter is placed in the request.
 */
public enum HttpParametersType {

    /** 0: query/url parameter */
    PARAMETER,

    /** 1: request body */
    BODY,

    /** 2: request headers */
    HEADERS
}
/**
 * Priority levels for processes and tasks.
 */
public enum Priority {

    /** 0: highest priority */
    HIGHEST,

    /** 1: higher priority */
    HIGH,

    /** 2: medium priority */
    MEDIUM,

    /** 3: lower priority */
    LOW,

    /** 4: lowest priority */
    LOWEST
}
/**
 * Program languages supported for submitted jobs.
 */
public enum ProgramType {

    /** 0: Java */
    JAVA,

    /** 1: Scala */
    SCALA,

    /** 2: Python */
    PYTHON
}
/**
 * Release state of a process definition.
 */
public enum ReleaseState {

    /** 0: offline */
    OFFLINE,

    /** 1: online */
    ONLINE;

    /**
     * Map a persisted ordinal value back to its enum constant.
     *
     * @param value persisted ordinal value (0 or 1)
     * @return the matching state, or null when the value is out of the enum's range
     */
    public static ReleaseState getEnum(int value) {
        ReleaseState[] states = values();
        // Direct index lookup replaces the original linear ordinal scan;
        // constant ordinals equal their array positions by definition.
        if (value >= 0 && value < states.length) {
            return states[value];
        }
        // For values out of enum scope, preserve the documented null contract.
        return null;
    }
}
/**
 * Storage backend used for resource uploads.
 * (Original javadoc said "data base types" — a copy-paste error from DbType.)
 */
public enum ResUploadType {

    /** 0: HDFS */
    HDFS,

    /** 1: S3 */
    S3,

    /** 2: no resource storage */
    NONE
}
/**
 * Kind of uploaded resource.
 */
public enum ResourceType {

    /** 0: plain file */
    FILE,

    /** 1: UDF resource */
    UDF
}
/**
 * Run mode for complement-data execution.
 */
public enum RunMode {

    /** 0: run serially */
    RUN_MODE_SERIAL,

    /** 1: run in parallel */
    RUN_MODE_PARALLEL
}
/**
 * Self-dependency strategy: whether a cycle depends on its previous cycle.
 * (Fixes original javadoc typos: "self depency", "donot".)
 */
public enum SelfDependStrategy {

    /** 0: do not depend on the last cycle */
    NO_DEP_PRE,

    /** 1: depend on the last cycle */
    DEP_PRE
}
/**
 * Server roles in the cluster.
 * (Original javadoc said "cycle enums" — a copy-paste error from CycleEnum.)
 */
public enum ServerEnum {

    /** master server */
    MASTER_SERVER,

    /** worker server */
    WORKER_SERVER
}
+ */ +package org.apache.dolphinscheduler.common.enums; + +/** + * show type for email + */ +public enum ShowType { + /** + * 0 TABLE; + * 1 TEXT; + * 2 attachment; + * 3 TABLE+attachment; + */ + TABLE, + TEXT, + ATTACHMENT, + TABLEATTACHMENT + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskDependType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskDependType.java new file mode 100644 index 0000000000..ae0df68c48 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskDependType.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.enums; + +/** + * task node depend type + */ +public enum TaskDependType { + /** + * 0 run current tasks only + * 1 run current tasks and previous tasks + * 2 run current tasks and the other tasks that depend on current tasks; + */ + TASK_ONLY, TASK_PRE, TASK_POST; + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskRecordStatus.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskRecordStatus.java new file mode 100644 index 0000000000..b3d5426023 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskRecordStatus.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.enums; + + +/** + * task record status + * + */ +public enum TaskRecordStatus { + + /** + * status: + * 0 success + * 1 failure + * 2 exception + */ + SUCCESS,FAILURE,EXCEPTION + + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java new file mode 100644 index 0000000000..05e6e469ac --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.enums; + +/** + * type of task state + */ +public enum TaskStateType { + /** + * 0 waiting running + * 1 running + * 2 finish + * 3 failed + * 4 success + */ + WAITTING, RUNNING, FINISH, FAILED, SUCCESS; + + + /** + * convert task state to execute status integer array ; + * @param taskStateType + * @return + */ + public static int[] convert2ExecutStatusIntArray(TaskStateType taskStateType){ + + switch (taskStateType){ + case SUCCESS: + return new int[]{ExecutionStatus.SUCCESS.ordinal()}; + case FAILED: + return new int[]{ + ExecutionStatus.FAILURE.ordinal(), + ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal()}; + case FINISH: + return new int[]{ + ExecutionStatus.PAUSE.ordinal(), + ExecutionStatus.STOP.ordinal() + }; + case RUNNING: + return new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), + ExecutionStatus.RUNNING_EXEUTION.ordinal(), + ExecutionStatus.READY_PAUSE.ordinal(), + ExecutionStatus.READY_STOP.ordinal()}; + case WAITTING: + return new int[]{ + ExecutionStatus.SUBMITTED_SUCCESS.ordinal() + }; + default: + break; + } + return null; + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskTimeoutStrategy.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskTimeoutStrategy.java new file mode 100644 index 0000000000..557d9b8b77 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskTimeoutStrategy.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.enums; + +/** + * task timeout strategy + */ +public enum TaskTimeoutStrategy { + /** + * 0 warn + * 1 failed + * 2 warn+failed + */ + WARN, FAILED, WARNFAILED +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java new file mode 100644 index 0000000000..8575a87e54 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.enums; + +/** + * task node type + */ +public enum TaskType { + /** + * 0 SHELL + * 1 SQL + * 2 SUB_PROCESS + * 3 PROCEDURE + * 4 MR + * 5 SPARK + * 6 PYTHON + * 7 DEPENDENT + * 8 FLINK + * 9 HTTP + */ + SHELL,SQL, SUB_PROCESS,PROCEDURE,MR,SPARK,PYTHON,DEPENDENT,FLINK,HTTP; + + public static boolean typeIsNormalTask(String typeName) { + TaskType taskType = TaskType.valueOf(typeName); + return !(taskType == TaskType.SUB_PROCESS || taskType == TaskType.DEPENDENT); + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java new file mode 100644 index 0000000000..1fbb8cb6a6 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.enums; + +/** + * UDF type + */ +public enum UdfType { + /** + * 0 hive; 1 spark + */ + HIVE, SPARK +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UserType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UserType.java new file mode 100644 index 0000000000..62bb6b9d59 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UserType.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.enums; + +/** + * user type + */ +public enum UserType { + /** + * 0 admin user; 1 general user + */ + ADMIN_USER, + GENERAL_USER +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/WarningType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/WarningType.java new file mode 100644 index 0000000000..7e319a4430 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/WarningType.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.enums; + +/** + * types for whether to send warning when process ending; + */ +public enum WarningType { + /** + * 0 do not send warning; + * 1 send if process success; + * 2 send if process failed; + * 3 send if process ending; + */ + NONE, SUCCESS, FAILURE, ALL; + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ZKNodeType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ZKNodeType.java new file mode 100644 index 0000000000..36a3d86a80 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ZKNodeType.java @@ -0,0 +1,15 @@ +package org.apache.dolphinscheduler.common.enums; + +/** + * zk node type + */ +public enum ZKNodeType { + + /** + * 0 master node; + * 1 worker node; + * 2 dead server node; + * 3 task queue node; + */ + MASTER, WORKER, DEAD_SERVER, TASK_QUEUE; +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java new file mode 100644 index 0000000000..ee7faff3e7 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java @@ -0,0 +1,519 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.graph; + +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +/** + * analysis of DAG + * Node: node + * NodeInfo:node description information + * EdgeInfo: edge description information + */ +public class DAG { + + + private static final Logger logger = LoggerFactory.getLogger(DAG.class); + + private final ReadWriteLock lock = new ReentrantReadWriteLock(); + + /** + * node map, key is node, value is node information + */ + private volatile Map nodesMap; + + /** + * edge map. key is node of origin;value is Map with key for destination node and value for edge + */ + private volatile Map> edgesMap; + + /** + * reversed edge set,key is node of destination, value is Map with key for origin node and value for edge + */ + private volatile Map> reverseEdgesMap; + + + public DAG() { + nodesMap = new HashMap<>(); + edgesMap = new HashMap<>(); + reverseEdgesMap = new HashMap<>(); + } + + + /** + * add node information + * + * @param node node + * @param nodeInfo node information + */ + public void addNode(Node node, NodeInfo nodeInfo) { + lock.writeLock().lock(); + + try{ + nodesMap.put(node, nodeInfo); + }finally { + lock.writeLock().unlock(); + } + + } + + + /** + * add edge + * @param fromNode node of origin + * @param toNode node of destination + * @return The result of adding an edge. 
returns false if the DAG result is a ring result + */ + public boolean addEdge(Node fromNode, Node toNode) { + return addEdge(fromNode, toNode, false); + } + + + /** + * add edge + * @param fromNode node of origin + * @param toNode node of destination + * @param createNode whether the node needs to be created if it does not exist + * @return The result of adding an edge. returns false if the DAG result is a ring result + */ + private boolean addEdge(Node fromNode, Node toNode, boolean createNode) { + return addEdge(fromNode, toNode, null, createNode); + } + + + /** + * add edge + * + * @param fromNode node of origin + * @param toNode node of destination + * @param edge edge description + * @param createNode whether the node needs to be created if it does not exist + * @return The result of adding an edge. returns false if the DAG result is a ring result + */ + public boolean addEdge(Node fromNode, Node toNode, EdgeInfo edge, boolean createNode) { + lock.writeLock().lock(); + + try{ + + // Whether an edge can be successfully added(fromNode -> toNode) + if (!isLegalAddEdge(fromNode, toNode, createNode)) { + logger.error("serious error: add edge({} -> {}) is invalid, cause cycle!", fromNode, toNode); + return false; + } + + addNodeIfAbsent(fromNode, null); + addNodeIfAbsent(toNode, null); + + addEdge(fromNode, toNode, edge, edgesMap); + addEdge(toNode, fromNode, edge, reverseEdgesMap); + + return true; + }finally { + lock.writeLock().unlock(); + } + + } + + + /** + * whether this node is contained + * + * @param node node + * @return + */ + public boolean containsNode(Node node) { + lock.readLock().lock(); + + try{ + return nodesMap.containsKey(node); + }finally { + lock.readLock().unlock(); + } + } + + + /** + * whether this edge is contained + * + * @param fromNode node of origin + * @param toNode node of destination + * @return + */ + public boolean containsEdge(Node fromNode, Node toNode) { + lock.readLock().lock(); + try{ + Map endEdges = edgesMap.get(fromNode); 
+ if (endEdges == null) { + return false; + } + + return endEdges.containsKey(toNode); + }finally { + lock.readLock().unlock(); + } + } + + + /** + * get node description + * + * @param node node + * @return + */ + public NodeInfo getNode(Node node) { + lock.readLock().lock(); + + try{ + return nodesMap.get(node); + }finally { + lock.readLock().unlock(); + } + } + + + /** + * Get the number of nodes + * + * @return + */ + public int getNodesCount() { + lock.readLock().lock(); + + try{ + return nodesMap.size(); + }finally { + lock.readLock().unlock(); + } + } + + /** + * Get the number of edges + * + * @return + */ + public int getEdgesCount() { + lock.readLock().lock(); + try{ + int count = 0; + + for (Map.Entry> entry : edgesMap.entrySet()) { + count += entry.getValue().size(); + } + + return count; + }finally { + lock.readLock().unlock(); + } + } + + + /** + * get the start node of DAG + * + * @return + */ + public Collection getBeginNode() { + lock.readLock().lock(); + + try{ + return CollectionUtils.subtract(nodesMap.keySet(), reverseEdgesMap.keySet()); + }finally { + lock.readLock().unlock(); + } + + } + + + /** + * get the end node of DAG + * + * @return + */ + public Collection getEndNode() { + + lock.readLock().lock(); + + try{ + return CollectionUtils.subtract(nodesMap.keySet(), edgesMap.keySet()); + }finally { + lock.readLock().unlock(); + } + + } + + + /** + * Gets all previous nodes of the node + * + * @param node node id to be calculated + * @return + */ + public Set getPreviousNodes(Node node) { + lock.readLock().lock(); + + try{ + return getNeighborNodes(node, reverseEdgesMap); + }finally { + lock.readLock().unlock(); + } + } + + + /** + * Get all subsequent nodes of the node + * + * @param node node id to be calculated + * @return + */ + public Set getSubsequentNodes(Node node) { + lock.readLock().lock(); + + try{ + return getNeighborNodes(node, edgesMap); + }finally { + lock.readLock().unlock(); + } + } + + + /** + * Gets the degree of entry of the 
node + * + * @param node node id + * @return + */ + public int getIndegree(Node node) { + lock.readLock().lock(); + + try{ + return getPreviousNodes(node).size(); + }finally { + lock.readLock().unlock(); + } + } + + + /** + * whether the graph has a ring + * + * @return true if has cycle, else return false. + */ + public boolean hasCycle() { + lock.readLock().lock(); + try{ + return !topologicalSortImpl().getKey(); + }finally { + lock.readLock().unlock(); + } + } + + + /** + * Only DAG has a topological sort + * @return topologically sorted results, returns false if the DAG result is a ring result + * @throws Exception + */ + public List topologicalSort() throws Exception { + lock.readLock().lock(); + + try{ + Map.Entry> entry = topologicalSortImpl(); + + if (entry.getKey()) { + return entry.getValue(); + } + + throw new Exception("serious error: graph has cycle ! "); + }finally { + lock.readLock().unlock(); + } + } + + + /** + * if tho node does not exist,add this node + * + * @param node node + * @param nodeInfo node information + */ + private void addNodeIfAbsent(Node node, NodeInfo nodeInfo) { + if (!containsNode(node)) { + addNode(node, nodeInfo); + } + } + + + /** + * add edge + * + * @param fromNode node of origin + * @param toNode node of destination + * @param edge edge description + * @param edges edge set + */ + private void addEdge(Node fromNode, Node toNode, EdgeInfo edge, Map> edges) { + edges.putIfAbsent(fromNode, new HashMap<>()); + Map toNodeEdges = edges.get(fromNode); + toNodeEdges.put(toNode, edge); + } + + + /** + * Whether an edge can be successfully added(fromNode -> toNode) + * need to determine whether the DAG has cycle + * + * @param fromNode node of origin + * @param toNode node of destination + * @param createNode whether to create a node + * @return + */ + private boolean isLegalAddEdge(Node fromNode, Node toNode, boolean createNode) { + if (fromNode.equals(toNode)) { + logger.error("edge fromNode({}) can't equals toNode({})", fromNode, 
toNode); + return false; + } + + if (!createNode) { + if (!containsNode(fromNode) || !containsNode(toNode)){ + logger.error("edge fromNode({}) or toNode({}) is not in vertices map", fromNode, toNode); + return false; + } + } + + // Whether an edge can be successfully added(fromNode -> toNode),need to determine whether the DAG has cycle! + int verticesCount = getNodesCount(); + + Queue queue = new LinkedList<>(); + + queue.add(toNode); + + // if DAG doesn't find fromNode, it's not has cycle! + while (!queue.isEmpty() && (--verticesCount > 0)) { + Node key = queue.poll(); + + for (Node subsequentNode : getSubsequentNodes(key)) { + if (subsequentNode.equals(fromNode)) { + return false; + } + + queue.add(subsequentNode); + } + } + + return true; + } + + + /** + * Get all neighbor nodes of the node + * + * @param node Node id to be calculated + * @param edges neighbor edge information + * @return + */ + private Set getNeighborNodes(Node node, final Map> edges) { + final Map neighborEdges = edges.get(node); + + if (neighborEdges == null) { + return Collections.EMPTY_MAP.keySet(); + } + + return neighborEdges.keySet(); + } + + + + /** + * Determine whether there are ring and topological sorting results + * + * Directed acyclic graph (DAG) has topological ordering + * Breadth First Search: + * 1、Traversal of all the vertices in the graph, the degree of entry is 0 vertex into the queue + * 2、Poll a vertex in the queue to update its adjacency (minus 1) and queue the adjacency if it is 0 after minus 1 + * 3、Do step 2 until the queue is empty + * If you cannot traverse all the nodes, it means that the current graph is not a directed acyclic graph. + * There is no topological sort. 
+ * + * + * @return key Returns the state + * if success (acyclic) is true, failure (acyclic) is looped, + * and value (possibly one of the topological sort results) + */ + private Map.Entry> topologicalSortImpl() { + // node queue with degree of entry 0 + Queue zeroIndegreeNodeQueue = new LinkedList<>(); + // save result + List topoResultList = new ArrayList<>(); + // save the node whose degree is not 0 + Map notZeroIndegreeNodeMap = new HashMap<>(); + + // Scan all the vertices and push vertexs with an entry degree of 0 to queue + for (Map.Entry vertices : nodesMap.entrySet()) { + Node node = vertices.getKey(); + int inDegree = getIndegree(node); + + if (inDegree == 0) { + zeroIndegreeNodeQueue.add(node); + topoResultList.add(node); + } else { + notZeroIndegreeNodeMap.put(node, inDegree); + } + } + + /** + * After scanning, there is no node with 0 degree of entry, + * indicating that there is a ring, and return directly + */ + if(zeroIndegreeNodeQueue.isEmpty()){ + return new AbstractMap.SimpleEntry(false, topoResultList); + } + + // The topology algorithm is used to delete nodes with 0 degree of entry and its associated edges + while (!zeroIndegreeNodeQueue.isEmpty()) { + Node v = zeroIndegreeNodeQueue.poll(); + // Get the neighbor node + Set subsequentNodes = getSubsequentNodes(v); + + for (Node subsequentNode : subsequentNodes) { + + Integer degree = notZeroIndegreeNodeMap.get(subsequentNode); + + if(--degree == 0){ + topoResultList.add(subsequentNode); + zeroIndegreeNodeQueue.add(subsequentNode); + notZeroIndegreeNodeMap.remove(subsequentNode); + }else{ + notZeroIndegreeNodeMap.put(subsequentNode, degree); + } + + } + } + + // if notZeroIndegreeNodeMap is empty,there is no ring! 
+ AbstractMap.SimpleEntry resultMap = new AbstractMap.SimpleEntry(notZeroIndegreeNodeMap.size() == 0 , topoResultList); + return resultMap; + + } + +} + diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/BaseDataSource.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/BaseDataSource.java new file mode 100644 index 0000000000..41a9b3a566 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/BaseDataSource.java @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.job.db; + +/** + * data source base class + */ +public abstract class BaseDataSource { + /** + * user name + */ + private String user; + + /** + * user password + */ + private String password; + + /** + * data source address + */ + private String address; + + /** + * database name + */ + private String database; + + /** + * other connection parameters for the data source + */ + private String other; + + /** + * principal + */ + private String principal; + + public String getPrincipal() { + return principal; + } + + public void setPrincipal(String principal) { + this.principal = principal; + } + /** + * test whether the data source can be connected successfully + * @throws Exception + */ + public abstract void isConnectable() throws Exception; + + /** + * gets the JDBC url for the data source connection + * @return + */ + public abstract String getJdbcUrl(); + + public String getUser() { + return user; + } + + public void setUser(String user) { + this.user = user; + } + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } + + public void setAddress(String address) { + this.address = address; + } + + public String getAddress() { + return address; + } + + public String getDatabase() { + return database; + } + + public void setDatabase(String database) { + this.database = database; + } + + public String getOther() { + return other; + } + + public void setOther(String other) { + this.other = other; + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/ClickHouseDataSource.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/ClickHouseDataSource.java new file mode 100644 index 0000000000..c2e1f86d30 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/ClickHouseDataSource.java @@ -0,0 +1,75 @@ +/* + * Licensed to the 
Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.job.db; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; + +/** + * data source of ClickHouse + */ +public class ClickHouseDataSource extends BaseDataSource { + private static final Logger logger = LoggerFactory.getLogger(ClickHouseDataSource.class); + + /** + * gets the JDBC url for the data source connection + * @return + */ + @Override + public String getJdbcUrl() { + String jdbcUrl = getAddress(); + if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { + jdbcUrl += "/"; + } + + jdbcUrl += getDatabase(); + + if (StringUtils.isNotEmpty(getOther())) { + jdbcUrl += "?" 
+ getOther(); + } + + return jdbcUrl; + } + + /** + * test whether the data source can be connected successfully + * @throws Exception + */ + @Override + public void isConnectable() throws Exception { + Connection con = null; + try { + Class.forName("ru.yandex.clickhouse.ClickHouseDriver"); + con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); + } finally { + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + logger.error("ClickHouse datasource try conn close conn error", e); + throw e; + } + } + } + + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/DataSourceFactory.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/DataSourceFactory.java new file mode 100644 index 0000000000..86492d80d5 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/DataSourceFactory.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.job.db; + +import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.Constants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * produce datasource in this custom defined datasource factory. + */ +public class DataSourceFactory { + + private static final Logger logger = LoggerFactory.getLogger(DataSourceFactory.class); + + public static BaseDataSource getDatasource(DbType dbType, String parameter) { + try { + switch (dbType) { + case MYSQL: + return JSONUtils.parseObject(parameter, MySQLDataSource.class); + case POSTGRESQL: + return JSONUtils.parseObject(parameter, PostgreDataSource.class); + case HIVE: + return JSONUtils.parseObject(parameter, HiveDataSource.class); + case SPARK: + return JSONUtils.parseObject(parameter, SparkDataSource.class); + case CLICKHOUSE: + return JSONUtils.parseObject(parameter, ClickHouseDataSource.class); + case ORACLE: + return JSONUtils.parseObject(parameter, OracleDataSource.class); + case SQLSERVER: + return JSONUtils.parseObject(parameter, SQLServerDataSource.class); + default: + return null; + } + } catch (Exception e) { + logger.error("get datasource object error", e); + return null; + } + } + + /** + * load class + * @param dbType + * @throws Exception + */ + public static void loadClass(DbType dbType) throws Exception{ + switch (dbType){ + case MYSQL : + Class.forName(Constants.JDBC_MYSQL_CLASS_NAME); + break; + case POSTGRESQL : + Class.forName(Constants.JDBC_POSTGRESQL_CLASS_NAME); + break; + case HIVE : + Class.forName(Constants.JDBC_HIVE_CLASS_NAME); + break; + case SPARK : + Class.forName(Constants.JDBC_SPARK_CLASS_NAME); + break; + case CLICKHOUSE : + Class.forName(Constants.JDBC_CLICKHOUSE_CLASS_NAME); + break; + case ORACLE : + Class.forName(Constants.JDBC_ORACLE_CLASS_NAME); + break; + case SQLSERVER: + 
Class.forName(Constants.JDBC_SQLSERVER_CLASS_NAME); + break; + default: + logger.error("not support sql type: {},can't load class", dbType); + throw new IllegalArgumentException("not support sql type,can't load class"); + + } + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/HiveDataSource.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/HiveDataSource.java new file mode 100644 index 0000000000..fc1be356f5 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/HiveDataSource.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.job.db; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.*; + +/** + * data source of hive + */ +public class HiveDataSource extends BaseDataSource { + + private static final Logger logger = LoggerFactory.getLogger(HiveDataSource.class); + + + + + /** + * gets the JDBC url for the data source connection + * @return + */ + @Override + public String getJdbcUrl() { + String jdbcUrl = getAddress(); + if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { + jdbcUrl += "/"; + } + + jdbcUrl += getDatabase(); + + if (StringUtils.isNotEmpty(getPrincipal())){ + jdbcUrl += ";principal=" + getPrincipal(); + } + + + + if (StringUtils.isNotEmpty(getOther())) { + jdbcUrl += ";" + getOther(); + } + + return jdbcUrl; + } + + /** + * test whether the data source can be connected successfully + * @throws Exception + */ + @Override + public void isConnectable() throws Exception { + Connection con = null; + try { + Class.forName("org.apache.hive.jdbc.HiveDriver"); + con = DriverManager.getConnection(getJdbcUrl(), getUser(), ""); + } finally { + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + logger.error("hive datasource try conn close conn error", e); + throw e; + } + } + } + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/MySQLDataSource.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/MySQLDataSource.java new file mode 100644 index 0000000000..0e850ee2de --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/MySQLDataSource.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.job.db; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; + +/** + * data source of mySQL + */ +public class MySQLDataSource extends BaseDataSource { + + private static final Logger logger = LoggerFactory.getLogger(MySQLDataSource.class); + + /** + * gets the JDBC url for the data source connection + * @return + */ + @Override + public String getJdbcUrl() { + String address = getAddress(); + if (address.lastIndexOf("/") != (address.length() - 1)) { + address += "/"; + } + String jdbcUrl = address + getDatabase(); + if (StringUtils.isNotEmpty(getOther())) { + jdbcUrl += "?" 
+ getOther(); + } + return jdbcUrl; + } + + /** + * test whether the data source can be connected successfully + * @throws Exception + */ + @Override + public void isConnectable() throws Exception { + Connection con = null; + try { + Class.forName("com.mysql.jdbc.Driver"); + con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); + } finally { + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + logger.error("Mysql datasource try conn close conn error", e); + throw e; + } + } + } + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/OracleDataSource.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/OracleDataSource.java new file mode 100644 index 0000000000..67c035d5be --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/OracleDataSource.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.job.db; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; + +/** + * data source of Oracle + */ +public class OracleDataSource extends BaseDataSource { + private static final Logger logger = LoggerFactory.getLogger(OracleDataSource.class); + + /** + * gets the JDBC url for the data source connection + * @return + */ + @Override + public String getJdbcUrl() { + String jdbcUrl = getAddress(); + if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { + jdbcUrl += "/"; + } + + jdbcUrl += getDatabase(); + + if (StringUtils.isNotEmpty(getOther())) { + jdbcUrl += "?" + getOther(); + } + + return jdbcUrl; + } + + /** + * test whether the data source can be connected successfully + * @throws Exception + */ + @Override + public void isConnectable() throws Exception { + Connection con = null; + try { + Class.forName("oracle.jdbc.driver.OracleDriver"); + con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); + } finally { + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + logger.error("Oracle datasource try conn close conn error", e); + throw e; + } + } + } + + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/PostgreDataSource.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/PostgreDataSource.java new file mode 100644 index 0000000000..332a9cb936 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/PostgreDataSource.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.job.db; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; + +/** + * data source of postgreSQL + */ +public class PostgreDataSource extends BaseDataSource { + + private static final Logger logger = LoggerFactory.getLogger(PostgreDataSource.class); + + + /** + * gets the JDBC url for the data source connection + * @return + */ + @Override + public String getJdbcUrl() { + String jdbcUrl = getAddress(); + if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { + jdbcUrl += "/"; + } + + jdbcUrl += getDatabase(); + + if (StringUtils.isNotEmpty(getOther())) { + jdbcUrl += "?" 
+ getOther(); + } + + return jdbcUrl; + } + + /** + * test whether the data source can be connected successfully + * @throws Exception + */ + @Override + public void isConnectable() throws Exception { + Connection con = null; + try { + Class.forName("org.postgresql.Driver"); + con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); + } finally { + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + logger.error("Postgre datasource try conn close conn error", e); + throw e; + } + } + } + + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SQLServerDataSource.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SQLServerDataSource.java new file mode 100644 index 0000000000..084b10d425 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SQLServerDataSource.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.job.db; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; + +/** + * data source of SQL Server + */ +public class SQLServerDataSource extends BaseDataSource { + private static final Logger logger = LoggerFactory.getLogger(SQLServerDataSource.class); + + /** + * gets the JDBC url for the data source connection + * @return + */ + @Override + public String getJdbcUrl() { + String jdbcUrl = getAddress(); + jdbcUrl += ";databaseName=" + getDatabase(); + + if (StringUtils.isNotEmpty(getOther())) { + jdbcUrl += ";" + getOther(); + } + + return jdbcUrl; + } + + /** + * test whether the data source can be connected successfully + * @throws Exception + */ + @Override + public void isConnectable() throws Exception { + Connection con = null; + try { + Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver"); + con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); + } finally { + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + logger.error("SQL Server datasource try conn close conn error", e); + throw e; + } + } + } + + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SparkDataSource.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SparkDataSource.java new file mode 100644 index 0000000000..a15ec9abda --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SparkDataSource.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.job.db; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; + +/** + * data source of spark + */ +public class SparkDataSource extends BaseDataSource { + + private static final Logger logger = LoggerFactory.getLogger(SparkDataSource.class); + + /** + * gets the JDBC url for the data source connection + * @return + */ + @Override + public String getJdbcUrl() { + String jdbcUrl = getAddress(); + if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { + jdbcUrl += "/"; + } + + jdbcUrl += getDatabase() + ";principal=" + getPrincipal(); + + if (StringUtils.isNotEmpty(getOther())) { + jdbcUrl += ";" + getOther(); + } + + return jdbcUrl; + } + + /** + * test whether the data source can be connected successfully + * @throws Exception + */ + @Override + public void isConnectable() throws Exception { + Connection con = null; + try { + Class.forName("org.apache.hive.jdbc.HiveDriver"); + con = DriverManager.getConnection(getJdbcUrl(), getUser(), ""); + } finally { + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + logger.error("Spark datasource try conn close conn error", e); + throw e; + } + } + } + + } +} diff --git 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DateInterval.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DateInterval.java new file mode 100644 index 0000000000..e3bca6ecbf --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DateInterval.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.model; + +import java.util.Date; + +/** + * date interval class + */ +public class DateInterval { + + private Date startTime; + + private Date endTime; + + public DateInterval(Date beginTime, Date endTime){ + this.startTime = beginTime; + this.endTime = endTime; + + } + + @Override + public boolean equals(Object obj) { + try{ + DateInterval dateInterval = (DateInterval) obj; + return startTime.equals(dateInterval.getStartTime()) && + endTime.equals(dateInterval.getEndTime()); + }catch (Exception e){ + return false; + } + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public Date getEndTime() { + return endTime; + } + + public void setEndTime(Date endTime) { + this.endTime = endTime; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java new file mode 100644 index 0000000000..484a2f7ac8 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.model; + +import org.apache.dolphinscheduler.common.enums.DependResult; + +/** + * dependent item + */ +public class DependentItem { + + private int definitionId; + private String depTasks; + private String cycle; + private String dateValue; + private DependResult dependResult; + + + public String getKey(){ + return String.format("%d-%s-%s-%s", + getDefinitionId(), + getDepTasks(), + getCycle(), + getDateValue()); + } + + public int getDefinitionId() { + return definitionId; + } + + public void setDefinitionId(int definitionId) { + this.definitionId = definitionId; + } + + public String getDepTasks() { + return depTasks; + } + + public void setDepTasks(String depTasks) { + this.depTasks = depTasks; + } + + public String getCycle() { + return cycle; + } + + public void setCycle(String cycle) { + this.cycle = cycle; + } + + public String getDateValue() { + return dateValue; + } + + public void setDateValue(String dateValue) { + this.dateValue = dateValue; + } + + public DependResult getDependResult() { + return dependResult; + } + + public void setDependResult(DependResult dependResult) { + this.dependResult = dependResult; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentTaskModel.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentTaskModel.java new file mode 100644 index 0000000000..93647bae04 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentTaskModel.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.model; + +import org.apache.dolphinscheduler.common.enums.DependentRelation; + +import java.util.List; + +public class DependentTaskModel { + + + private List dependItemList; + private DependentRelation relation; + + public List getDependItemList() { + return dependItemList; + } + + public void setDependItemList(List dependItemList) { + this.dependItemList = dependItemList; + } + + public DependentRelation getRelation() { + return relation; + } + + public void setRelation(DependentRelation relation) { + this.relation = relation; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/MasterServer.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/MasterServer.java new file mode 100644 index 0000000000..cf5ae5f8c2 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/MasterServer.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.model; + + +import java.util.Date; + +/** + * master server + */ +public class MasterServer { + + /** + * id + */ + private int id; + + /** + * host + */ + private String host; + + /** + * port + */ + private int port; + + /** + * master directory in zookeeper + */ + private String zkDirectory; + + /** + * resource info: CPU and memory + */ + private String resInfo; + + /** + * create time + */ + private Date createTime; + + /** + * last heartbeat time + */ + private Date lastHeartbeatTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getZkDirectory() { + return zkDirectory; + } + + public void setZkDirectory(String zkDirectory) { + this.zkDirectory = zkDirectory; + } + + public Date getLastHeartbeatTime() { + return lastHeartbeatTime; + } + + public void setLastHeartbeatTime(Date lastHeartbeatTime) { + this.lastHeartbeatTime = lastHeartbeatTime; + } + + public String getResInfo() { + return resInfo; + } + + public void setResInfo(String resInfo) { + this.resInfo = resInfo; + } + + @Override + public String toString() { + return "MasterServer{" + + "id=" + id + + ", host='" + host
+ '\'' + + ", port=" + port + + ", zkDirectory='" + zkDirectory + '\'' + + ", resInfo='" + resInfo + '\'' + + ", createTime=" + createTime + + ", lastHeartbeatTime=" + lastHeartbeatTime + + '}'; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java new file mode 100644 index 0000000000..076966ef72 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java @@ -0,0 +1,324 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.model; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; +import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import com.alibaba.fastjson.JSONObject; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import org.apache.commons.lang3.StringUtils; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + + +public class TaskNode { + + /** + * task node id + */ + private String id; + + /** + * task node name + */ + private String name; + + /** + * task node description + */ + private String desc; + + /** + * task node type + */ + private String type; + + /** + * the run flag has two states, NORMAL or FORBIDDEN + */ + private String runFlag; + + /** + * the front field + */ + private String loc; + + /** + * maximum number of retries + */ + private int maxRetryTimes; + + /** + * Unit of retry interval: points + */ + private int retryInterval; + + /** + * params information + */ + @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) + @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) + private String params; + + /** + * inner dependency information + */ + @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) + @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) + private String preTasks; + + /** + * users store additional information + */ + @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) + @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) + private String extras; + + /** + * node dependency list + */ + private List depList; + 
+ /** + * outer dependency information + */ + @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) + @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) + private String dependence; + + /** + * task instance priority + */ + private Priority taskInstancePriority; + + /** + * worker group id + */ + private int workerGroupId; + + + /** + * task time out + */ + @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) + @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) + private String timeout; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDesc() { + return desc; + } + + public void setDesc(String desc) { + this.desc = desc; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getParams() { + return params; + } + + public void setParams(String params) { + this.params = params; + } + + public String getPreTasks() { + return preTasks; + } + + public void setPreTasks(String preTasks) throws IOException { + this.preTasks = preTasks; + this.depList = JSONUtils.toList(preTasks, String.class); + } + + public String getExtras() { + return extras; + } + + public void setExtras(String extras) { + this.extras = extras; + } + + public List getDepList() { + return depList; + } + + public void setDepList(List depList) throws JsonProcessingException { + this.depList = depList; + this.preTasks = JSONUtils.toJson(depList); + } + + public String getLoc() { + return loc; + } + + public void setLoc(String loc) { + this.loc = loc; + } + + public String getRunFlag(){ + return runFlag; + } + + public void setRunFlag(String runFlag) { + this.runFlag = runFlag; + } + + public Boolean isForbidden(){ + return (StringUtils.isNotEmpty(this.runFlag) && + 
this.runFlag.equals(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN)); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TaskNode taskNode = (TaskNode) o; + return Objects.equals(name, taskNode.name) && + Objects.equals(desc, taskNode.desc) && + Objects.equals(type, taskNode.type) && + Objects.equals(params, taskNode.params) && + Objects.equals(preTasks, taskNode.preTasks) && + Objects.equals(extras, taskNode.extras) && + Objects.equals(runFlag, taskNode.runFlag) && + Objects.equals(dependence, taskNode.dependence) && + Objects.equals(workerGroupId, taskNode.workerGroupId) && + CollectionUtils.equalLists(depList, taskNode.depList); + } + + @Override + public int hashCode() { + return Objects.hash(name, desc, type, params, preTasks, extras, depList, runFlag); + } + + public String getDependence() { + return dependence; + } + + public void setDependence(String dependence) { + this.dependence = dependence; + } + + public int getMaxRetryTimes() { + return maxRetryTimes; + } + + public void setMaxRetryTimes(int maxRetryTimes) { + this.maxRetryTimes = maxRetryTimes; + } + + public int getRetryInterval() { + return retryInterval; + } + + public void setRetryInterval(int retryInterval) { + this.retryInterval = retryInterval; + } + + public Priority getTaskInstancePriority() { + return taskInstancePriority; + } + + public void setTaskInstancePriority(Priority taskInstancePriority) { + this.taskInstancePriority = taskInstancePriority; + } + + public String getTimeout() { + return timeout; + } + + public void setTimeout(String timeout) { + this.timeout = timeout; + } + + /** + * get task time out parameter + * @return + */ + public TaskTimeoutParameter getTaskTimeoutParameter() { + if(StringUtils.isNotEmpty(this.getTimeout())){ + String formatStr = String.format("%s,%s", TaskTimeoutStrategy.WARN.name(), TaskTimeoutStrategy.FAILED.name()); + String timeout = 
this.getTimeout().replace(formatStr,TaskTimeoutStrategy.WARNFAILED.name()); + return JSONObject.parseObject(timeout,TaskTimeoutParameter.class); + } + return new TaskTimeoutParameter(false); + } + + @Override + public String toString() { + return "TaskNode{" + + "id='" + id + '\'' + + ", name='" + name + '\'' + + ", desc='" + desc + '\'' + + ", type='" + type + '\'' + + ", runFlag='" + runFlag + '\'' + + ", loc='" + loc + '\'' + + ", maxRetryTimes=" + maxRetryTimes + + ", retryInterval=" + retryInterval + + ", params='" + params + '\'' + + ", preTasks='" + preTasks + '\'' + + ", extras='" + extras + '\'' + + ", depList=" + depList + + ", dependence='" + dependence + '\'' + + ", taskInstancePriority=" + taskInstancePriority + + ", timeout='" + timeout + '\'' + + ", workerGroupId='" + workerGroupId + '\'' + + '}'; + } + + public int getWorkerGroupId() { + return workerGroupId; + } + + public void setWorkerGroupId(int workerGroupId) { + this.workerGroupId = workerGroupId; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNodeRelation.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNodeRelation.java new file mode 100644 index 0000000000..f1a78029b6 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNodeRelation.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.model; + +public class TaskNodeRelation { + + /** + * task start node name + */ + private String startNode; + + /** + * task end node name + */ + private String endNode; + + public TaskNodeRelation() { + } + + public TaskNodeRelation(String startNode, String endNode) { + this.startNode = startNode; + this.endNode = endNode; + } + + public String getStartNode() { + return startNode; + } + + public void setStartNode(String startNode) { + this.startNode = startNode; + } + + public String getEndNode() { + return endNode; + } + + public void setEndNode(String endNode) { + this.endNode = endNode; + } + + + public boolean equals(TaskNodeRelation e){ + return (e.getStartNode() == this.startNode && e.getEndNode() == this.endNode); + } + + @Override + public String toString() { + return "TaskNodeRelation{" + + "startNode='" + startNode + '\'' + + ", endNode='" + endNode + '\'' + + '}'; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/HttpProperty.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/HttpProperty.java new file mode 100644 index 0000000000..71a3fd9db8 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/HttpProperty.java @@ -0,0 +1,125 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.common.process;

import org.apache.dolphinscheduler.common.enums.HttpParametersType;

import java.util.Objects;

/**
 * One HTTP request property: a key/value pair plus the kind of parameter
 * it represents (e.g. header, body field), as described by
 * {@link HttpParametersType}.
 *
 * Note: equality and hashing are based on {@code prop} and {@code value}
 * only; {@code httpParametersType} is intentionally not part of identity.
 */
public class HttpProperty {

    /** property key */
    private String prop;

    /** the kind of HTTP parameter this property represents */
    private HttpParametersType httpParametersType;

    /** property value */
    private String value;

    public HttpProperty() {
    }

    public HttpProperty(String prop, HttpParametersType httpParametersType, String value) {
        this.prop = prop;
        this.httpParametersType = httpParametersType;
        this.value = value;
    }

    /** @return the property key */
    public String getProp() {
        return prop;
    }

    /** @param prop the property key to set */
    public void setProp(String prop) {
        this.prop = prop;
    }

    /** @return the property value */
    public String getValue() {
        return value;
    }

    /** @param value the property value to set */
    public void setValue(String value) {
        this.value = value;
    }

    /** @return the parameter kind */
    public HttpParametersType getHttpParametersType() {
        return httpParametersType;
    }

    /** @param httpParametersType the parameter kind to set */
    public void setHttpParametersType(HttpParametersType httpParametersType) {
        this.httpParametersType = httpParametersType;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        HttpProperty other = (HttpProperty) o;
        return Objects.equals(prop, other.prop)
                && Objects.equals(value, other.value);
    }

    @Override
    public int hashCode() {
        return Objects.hash(prop, value);
    }

    @Override
    public String toString() {
        return "HttpProperty{" +
                "prop='" + prop + '\'' +
                ", httpParametersType=" + httpParametersType +
                ", value='" + value + '\'' +
                '}';
    }
}
+ */ +package org.apache.dolphinscheduler.common.process; + + + +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.model.TaskNodeRelation; + +import java.util.List; + +public class ProcessDag { + + /** + * DAG edge list + **/ + private List edges; + + /** + * DAG node list + */ + private List nodes; + + /** + * getter method + * + * @return the edges + * @see ProcessDag#edges + */ + public List getEdges() { + return edges; + } + + /** + * setter method + * + * @param edges the edges to set + * @see ProcessDag#edges + */ + public void setEdges(List edges) { + this.edges = edges; + } + + /** + * getter method + * + * @return the nodes + * @see ProcessDag#nodes + */ + public List getNodes() { + return nodes; + } + + /** + * setter method + * + * @param nodes the nodes to set + * @see ProcessDag#nodes + */ + public void setNodes(List nodes) { + this.nodes = nodes; + } + + @Override + public String toString() { + return "ProcessDag{" + + "edges=" + edges + + ", nodes=" + nodes + + '}'; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/Property.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/Property.java new file mode 100644 index 0000000000..a0c7a928a1 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/Property.java @@ -0,0 +1,143 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
package org.apache.dolphinscheduler.common.process;


import org.apache.dolphinscheduler.common.enums.DataType;
import org.apache.dolphinscheduler.common.enums.Direct;

import java.util.Objects;

/**
 * A process/task parameter: a named, typed value together with its
 * direction (input or output).
 *
 * Note: equality and hashing consider {@code prop} and {@code value} only;
 * {@code direct} and {@code type} are deliberately excluded from identity.
 */
public class Property {

    /** parameter key */
    private String prop;

    /** direction of the parameter: input or output */
    private Direct direct;

    /** data type of the value */
    private DataType type;

    /** parameter value */
    private String value;

    public Property() {
    }

    public Property(String prop, Direct direct, DataType type, String value) {
        this.prop = prop;
        this.direct = direct;
        this.type = type;
        this.value = value;
    }

    /** @return the parameter key */
    public String getProp() {
        return prop;
    }

    /** @param prop the parameter key to set */
    public void setProp(String prop) {
        this.prop = prop;
    }

    /** @return the parameter value */
    public String getValue() {
        return value;
    }

    /** @param value the parameter value to set */
    public void setValue(String value) {
        this.value = value;
    }

    /** @return the parameter direction */
    public Direct getDirect() {
        return direct;
    }

    /** @param direct the parameter direction to set */
    public void setDirect(Direct direct) {
        this.direct = direct;
    }

    /** @return the value's data type */
    public DataType getType() {
        return type;
    }

    /** @param type the value's data type to set */
    public void setType(DataType type) {
        this.type = type;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Property other = (Property) o;
        return Objects.equals(prop, other.prop)
                && Objects.equals(value, other.value);
    }

    @Override
    public int hashCode() {
        return Objects.hash(prop, value);
    }

    @Override
    public String toString() {
        return "Property{" +
                "prop='" + prop + '\'' +
                ", direct=" + direct +
                ", type=" + type +
                ", value='" + value + '\'' +
                '}';
    }
}
+ */ +package org.apache.dolphinscheduler.common.process; + +/** + * resource info + */ +public class ResourceInfo { + /** + * res the name of the resource that was uploaded + */ + private String res; + + public String getRes() { + return res; + } + + public void setRes(String res) { + this.res = res; + } + + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/ITaskQueue.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/ITaskQueue.java new file mode 100644 index 0000000000..054e25dd3a --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/ITaskQueue.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.queue; + +import java.util.List; +import java.util.Set; + +public interface ITaskQueue { + + /** + * take out all the elements + * + * + * @param key + * @return + */ + List getAllTasks(String key); + + /** + * check task exists in the task queue or not + * + * @param key queue name + * @param task ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} + * @return true if exists in the queue + */ + boolean checkTaskExists(String key, String task); + + /** + * add an element to the queue + * + * @param key queue name + * @param value + */ + void add(String key, String value); + + /** + * an element pops out of the queue + * + * @param key queue name + * @param n how many elements to poll + * @return + */ + List poll(String key, int n); + + /** + * remove a element from queue + * @param key + * @param value + */ + void removeNode(String key, String value); + + /** + * add an element to the set + * + * @param key + * @param value + */ + void sadd(String key, String value); + + /** + * delete the value corresponding to the key in the set + * + * @param key + * @param value + */ + void srem(String key, String value); + + /** + * gets all the elements of the set based on the key + * + * @param key + * @return + */ + Set smembers(String key); + + + /** + * clear the task queue for use by junit tests only + */ + void delete(); +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueFactory.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueFactory.java new file mode 100644 index 0000000000..efc2dd36bc --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueFactory.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.queue; + +import org.apache.dolphinscheduler.common.utils.CommonUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * task queue factory + */ +public class TaskQueueFactory { + + private static final Logger logger = LoggerFactory.getLogger(TaskQueueFactory.class); + + + private TaskQueueFactory(){ + + } + + + /** + * get instance (singleton) + * + * @return instance + */ + public static ITaskQueue getTaskQueueInstance() { + String queueImplValue = CommonUtils.getQueueImplValue(); + if (StringUtils.isNotBlank(queueImplValue)) { + logger.info("task queue impl use zookeeper "); + return TaskQueueZkImpl.getInstance(); + }else{ + logger.error("property escheduler.queue.impl can't be blank, system will exit "); + System.exit(-1); + } + + return null; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueZkImpl.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueZkImpl.java new file mode 100644 index 0000000000..e2fd56daff --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueZkImpl.java @@ -0,0 +1,439 @@ +/* + * Licensed to 
package org.apache.dolphinscheduler.common.queue;


import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.Bytes;
import org.apache.dolphinscheduler.common.utils.IpUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.zk.AbstractZKClient;
import org.apache.curator.framework.CuratorFramework;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A singleton task queue implemented with zookeeper: each queued task is a
 * persistent child znode under the queue path, named
 * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}[_hosts].
 */
public class TaskQueueZkImpl extends AbstractZKClient implements ITaskQueue {

    private static final Logger logger = LoggerFactory.getLogger(TaskQueueZkImpl.class);

    // volatile is required for safe double-checked locking in getInstance()
    private static volatile TaskQueueZkImpl instance;

    private TaskQueueZkImpl(){
        init();
    }

    /**
     * get the singleton instance (lazy, double-checked locking)
     */
    public static TaskQueueZkImpl getInstance(){
        if (null == instance) {
            synchronized (TaskQueueZkImpl.class) {
                if(null == instance) {
                    instance = new TaskQueueZkImpl();
                }
            }
        }
        return instance;
    }


    /**
     * get all tasks from tasks queue
     * @param key task queue name
     * @return child node names, or an empty list when the read fails
     */
    @Override
    public List<String> getAllTasks(String key) {
        try {
            return getZkClient().getChildren().forPath(getTasksPath(key));
        } catch (Exception e) {
            logger.error("get all tasks from tasks queue exception",e);
        }
        return new ArrayList<>();
    }

    /**
     * check task exists in the task queue or not
     *
     * @param key queue name
     * @param task ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}
     * @return true if exists in the queue
     */
    @Override
    public boolean checkTaskExists(String key, String task) {
        String taskPath = getTasksPath(key) + Constants.SINGLE_SLASH + task;

        try {
            Stat stat = zkClient.checkExists().forPath(taskPath);

            if(null == stat){
                logger.info("check task:{} not exist in task queue",task);
                return false;
            }else{
                logger.info("check task {} exists in task queue ",task);
                return true;
            }
        } catch (Exception e) {
            // fix: the original passed an slf4j "{}" template through String.format,
            // which logged the literal braces; use slf4j parameterized logging
            logger.error("task {} check exists in task queue exception", task, e);
        }

        return false;
    }


    /**
     * add task to tasks queue
     *
     * @param key task queue name
     * @param value ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_host1,host2,...
     */
    @Override
    public void add(String key, String value) {
        try {
            String taskIdPath = getTasksPath(key) + Constants.SINGLE_SLASH + value;
            String result = getZkClient().create().withMode(CreateMode.PERSISTENT).forPath(taskIdPath, Bytes.toBytes(value));

            logger.info("add task : {} to tasks queue , result success",result);
        } catch (Exception e) {
            logger.error("add task to tasks queue exception",e);
        }
    }


    /**
     * An element pops out of the queue.
     *
     * note: a queued node is named
     * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_host1,host2,...
     * and tasks are selected by comparing those four fields from high
     * (process priority) to low (task id).
     *
     * @param key task queue name
     * @param tasksNum how many elements to poll
     * @return the task ids to be executed
     */
    @Override
    public List<String> poll(String key, int tasksNum) {
        try{
            CuratorFramework zk = getZkClient();
            List<String> list = zk.getChildren().forPath(getTasksPath(key));

            if(list != null && list.size() > 0){

                String workerIp = OSUtils.getHost();
                String workerIpLongStr = String.valueOf(IpUtils.ipToLong(workerIp));

                int size = list.size();

                // order candidates lexicographically on the priority prefix,
                // with the trailing host list stripped off before comparison
                Set<String> taskTreeSet = new TreeSet<>(new Comparator<String>() {
                    @Override
                    public int compare(String o1, String o2) {
                        String s1 = o1;
                        String s2 = o2;
                        String[] s1Array = s1.split(Constants.UNDERLINE);
                        if(s1Array.length > 4){
                            // warning: if this length > 5, need to be changed
                            s1 = s1.substring(0, s1.lastIndexOf(Constants.UNDERLINE));
                        }

                        String[] s2Array = s2.split(Constants.UNDERLINE);
                        if(s2Array.length > 4){
                            // warning: if this length > 5, need to be changed
                            s2 = s2.substring(0, s2.lastIndexOf(Constants.UNDERLINE));
                        }

                        return s1.compareTo(s2);
                    }
                });

                for (int i = 0; i < size; i++) {

                    String taskDetail = list.get(i);
                    String[] taskDetailArrs = taskDetail.split(Constants.UNDERLINE);

                    // forward compatibility: entries written by earlier versions
                    // may have fewer than four fields and are skipped
                    if(taskDetailArrs.length >= 4){

                        // format ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}
                        // ids are zero-padded so lexicographic order matches numeric order
                        String formatTask = String.format("%s_%010d_%s_%010d", taskDetailArrs[0], Long.parseLong(taskDetailArrs[1]), taskDetailArrs[2], Long.parseLong(taskDetailArrs[3]));
                        if(taskDetailArrs.length > 4){
                            String taskHosts = taskDetailArrs[4];

                            // task can assign to any worker host if equals default ip value of worker server
                            if(!taskHosts.equals(String.valueOf(Constants.DEFAULT_WORKER_ID))){
                                String[] taskHostsArr = taskHosts.split(Constants.COMMA);
                                if(!Arrays.asList(taskHostsArr).contains(workerIpLongStr)){
                                    continue;
                                }
                            }
                            formatTask += Constants.UNDERLINE + taskDetailArrs[4];
                        }
                        taskTreeSet.add(formatTask);
                    }
                }

                List<String> taskslist = getTasksListFromTreeSet(tasksNum, taskTreeSet);

                logger.info("consume tasks: {},there still have {} tasks need to be executed", Arrays.toString(taskslist.toArray()), size - taskslist.size());

                return taskslist;
            }else{
                Thread.sleep(Constants.SLEEP_TIME_MILLIS);
            }

        } catch (Exception e) {
            // fix: message previously said "add task ..." (copy/paste from add())
            logger.error("poll tasks from tasks queue exception",e);
        }
        return new ArrayList<>();
    }


    /**
     * take up to tasksNum tasks, in priority order, from the sorted set
     *
     * @param tasksNum maximum number of tasks to return
     * @param taskTreeSet candidates sorted by priority
     * @return tasks restored to their original (un-padded) format
     */
    public List<String> getTasksListFromTreeSet(int tasksNum, Set<String> taskTreeSet) {
        Iterator<String> iterator = taskTreeSet.iterator();
        int j = 0;
        List<String> taskslist = new ArrayList<>(tasksNum);
        while(iterator.hasNext()){
            if(j++ >= tasksNum){
                break;
            }
            taskslist.add(getOriginTaskFormat(iterator.next()));
        }
        return taskslist;
    }

    /**
     * format ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}
     * processInstanceId and task id are converted back to plain ints
     * (undoing the zero-padding applied for sorting).
     *
     * @param formatTask the zero-padded task key
     * @return the original task key
     */
    private String getOriginTaskFormat(String formatTask){
        String[] taskArray = formatTask.split(Constants.UNDERLINE);
        if(taskArray.length < 4){
            return formatTask;
        }
        int processInstanceId = Integer.parseInt(taskArray[1]);
        int taskId = Integer.parseInt(taskArray[3]);

        StringBuilder sb = new StringBuilder(50);
        String destTask = String.format("%s_%s_%s_%s", taskArray[0], processInstanceId, taskArray[2], taskId);

        sb.append(destTask);

        if(taskArray.length > 4){
            // re-append the host list (and any further fields) untouched
            for(int index = 4; index < taskArray.length; index++){
                sb.append(Constants.UNDERLINE).append(taskArray[index]);
            }
        }
        return sb.toString();
    }

    /**
     * remove a single task node from the queue (no-op when absent)
     */
    @Override
    public void removeNode(String key, String nodeValue){

        CuratorFramework zk = getZkClient();
        String tasksQueuePath = getTasksPath(key) + Constants.SINGLE_SLASH;
        String taskIdPath = tasksQueuePath + nodeValue;
        logger.info("consume task {}", taskIdPath);
        try{
            Stat stat = zk.checkExists().forPath(taskIdPath);
            if(stat != null){
                zk.delete().forPath(taskIdPath);
            }
        }catch(Exception e){
            // fix: was String.format over a message with no remaining specifier;
            // use slf4j parameterized logging instead
            logger.error("delete task:{} from zookeeper fail", nodeValue, e);
        }

    }



    /**
     * In order to be compatible with redis implementation
     *
     * To be compatible with the redis implementation, add an element to the set
     * @param key The key is the kill/cancel queue path name
     * @param value host-taskId The name of the zookeeper node
     */
    @Override
    public void sadd(String key,String value) {
        try {

            if(value != null && value.trim().length() > 0){
                String path = getTasksPath(key) + Constants.SINGLE_SLASH;
                CuratorFramework zk = getZkClient();
                Stat stat = zk.checkExists().forPath(path + value);

                if(null == stat){
                    String result = zk.create().withMode(CreateMode.PERSISTENT).forPath(path + value,Bytes.toBytes(value));
                    logger.info("add task:{} to tasks set result:{} ",value,result);
                }else{
                    logger.info("task {} exists in tasks set ",value);
                }

            }else{
                logger.warn("add host-taskId:{} to tasks set is empty ",value);
            }

        } catch (Exception e) {
            logger.error("add task to tasks set exception",e);
        }
    }


    /**
     * delete the value corresponding to the key in the set
     * @param key The key is the kill/cancel queue path name
     * @param value host-taskId-taskType The name of the zookeeper node
     */
    @Override
    public void srem(String key, String value) {
        try{
            String path = getTasksPath(key) + Constants.SINGLE_SLASH;
            CuratorFramework zk = getZkClient();
            Stat stat = zk.checkExists().forPath(path + value);

            if(null != stat){
                zk.delete().forPath(path + value);
                logger.info("delete task:{} from tasks set ",value);
            }else{
                logger.info("delete task:{} from tasks set fail, there is no this task",value);
            }

        }catch(Exception e){
            // fix: was String.format over a plain concatenation (no specifiers);
            // use slf4j parameterized logging instead
            logger.error("delete task:{} exception", value, e);
        }
    }


    /**
     * Gets all the elements of the set based on the key
     * @param key The key is the kill/cancel queue path name
     * @return all elements, or whatever was collected before a failure
     */
    @Override
    public Set<String> smembers(String key) {

        Set<String> tasksSet = new HashSet<>();

        try {
            List<String> list = getZkClient().getChildren().forPath(getTasksPath(key));

            tasksSet.addAll(list);

            return tasksSet;
        } catch (Exception e) {
            logger.error("get all tasks from tasks queue exception",e);
        }

        return tasksSet;
    }



    /**
     * Init the task queue of zookeeper node: ensure the queue and kill
     * parent znodes exist.
     */
    private void init(){
        try {
            String tasksQueuePath = getTasksPath(Constants.SCHEDULER_TASKS_QUEUE);
            String tasksCancelPath = getTasksPath(Constants.SCHEDULER_TASKS_KILL);

            for(String taskQueuePath : new String[]{tasksQueuePath,tasksCancelPath}){
                if(zkClient.checkExists().forPath(taskQueuePath) == null){
                    // create a persistent parent node
                    zkClient.create().creatingParentContainersIfNeeded()
                            .withMode(CreateMode.PERSISTENT).forPath(taskQueuePath);
                    logger.info("create tasks queue parent node success : {} ",taskQueuePath);
                }
            }

        } catch (Exception e) {
            logger.error("create zk node failure",e);
        }
    }


    /**
     * Clear the task queue of zookeeper node (queue and kill paths);
     * for use by junit tests only.
     */
    @Override
    public void delete(){
        try {
            String tasksQueuePath = getTasksPath(Constants.SCHEDULER_TASKS_QUEUE);
            String tasksCancelPath = getTasksPath(Constants.SCHEDULER_TASKS_KILL);

            for(String taskQueuePath : new String[]{tasksQueuePath,tasksCancelPath}){
                if(zkClient.checkExists().forPath(taskQueuePath) != null){

                    List<String> list = zkClient.getChildren().forPath(taskQueuePath);

                    for (String task : list) {
                        zkClient.delete().forPath(taskQueuePath + Constants.SINGLE_SLASH + task);
                        logger.info("delete task from tasks queue : {}/{} ",taskQueuePath,task);
                    }

                }
            }

        } catch (Exception e) {
            logger.error("delete all tasks in tasks queue failure",e);
        }
    }

    /**
     * Get the task queue path
     * @param key task queue name
     * @return full zookeeper path of the queue
     */
    public String getTasksPath(String key){
        return conf.getString(Constants.ZOOKEEPER_SCHEDULER_ROOT) + Constants.SINGLE_SLASH + key;
    }


}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.shell; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.Map; +import java.util.Set; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicBoolean; + + +/** + * A base class for running a Unix command. + * + * AbstractShell can be used to run unix commands like du or + * df. It also offers facilities to gate commands by + * time-intervals. + */ +public abstract class AbstractShell { + + private static final Logger logger = LoggerFactory.getLogger(AbstractShell.class); + + + + /** + * Time after which the executing script would be timedout + */ + protected long timeOutInterval = 0L; + /** + * If or not script timed out + */ + private AtomicBoolean timedOut; + + /** + * refresh interval in msec + */ + private long interval; + + /** + * last time the command was performed + */ + private long lastTime; + + /** + * env for the command execution + */ + private Map environment; + private File dir; + + /** + * sub process used to execute the command + */ + private Process process; + private int exitCode; + + /** + * If or not script finished executing + */ + private volatile AtomicBoolean completed; + + public AbstractShell() { + this(0L); + } + + /** + * @param interval the minimum duration to wait before re-executing the + * command. 
+ */ + public AbstractShell(long interval ) { + this.interval = interval; + this.lastTime = (interval<0) ? 0 : -interval; + } + + + + /** + * set the environment for the command + * @param env Mapping of environment variables + */ + protected void setEnvironment(Map env) { + this.environment = env; + } + + /** + * set the working directory + * @param dir The directory where the command would be executed + */ + protected void setWorkingDirectory(File dir) { + this.dir = dir; + } + + /** + * check to see if a command needs to be executed and execute if needed + */ + protected void run() throws IOException { + if (lastTime + interval > System.currentTimeMillis()) { + return; + } + // reset for next run + exitCode = 0; + runCommand(); + } + + + /** + * Run a command actual work + */ + private void runCommand() throws IOException { + ProcessBuilder builder = new ProcessBuilder(getExecString()); + Timer timeOutTimer = null; + ShellTimeoutTimerTask timeoutTimerTask = null; + timedOut = new AtomicBoolean(false); + completed = new AtomicBoolean(false); + + if (environment != null) { + builder.environment().putAll(this.environment); + } + if (dir != null) { + builder.directory(this.dir); + } + + process = builder.start(); + ProcessContainer.putProcess(process); + + if (timeOutInterval > 0) { + timeOutTimer = new Timer(); + timeoutTimerTask = new ShellTimeoutTimerTask( + this); + //One time scheduling. 
+ timeOutTimer.schedule(timeoutTimerTask, timeOutInterval); + } + final BufferedReader errReader = + new BufferedReader(new InputStreamReader(process + .getErrorStream())); + BufferedReader inReader = + new BufferedReader(new InputStreamReader(process + .getInputStream())); + final StringBuilder errMsg = new StringBuilder(); + + // read error and input streams as this would free up the buffers + // free the error stream buffer + Thread errThread = new Thread() { + @Override + public void run() { + try { + String line = errReader.readLine(); + while((line != null) && !isInterrupted()) { + errMsg.append(line); + errMsg.append(System.getProperty("line.separator")); + line = errReader.readLine(); + } + } catch(IOException ioe) { + logger.warn("Error reading the error stream", ioe); + } + } + }; + try { + errThread.start(); + } catch (IllegalStateException ise) { } + try { + // parse the output + parseExecResult(inReader); + exitCode = process.waitFor(); + try { + // make sure that the error thread exits + errThread.join(); + } catch (InterruptedException ie) { + logger.warn("Interrupted while reading the error stream", ie); + } + completed.set(true); + //the timeout thread handling + //taken care in finally block + if (exitCode != 0) { + throw new ExitCodeException(exitCode, errMsg.toString()); + } + } catch (InterruptedException ie) { + throw new IOException(ie.toString()); + } finally { + if ((timeOutTimer!=null) && !timedOut.get()) { + timeOutTimer.cancel(); + } + // close the input stream + try { + inReader.close(); + } catch (IOException ioe) { + logger.warn("Error while closing the input stream", ioe); + } + if (!completed.get()) { + errThread.interrupt(); + } + try { + errReader.close(); + } catch (IOException ioe) { + logger.warn("Error while closing the error stream", ioe); + } + ProcessContainer.removeProcess(process); + process.destroy(); + lastTime = System.currentTimeMillis(); + } + } + + /** + * return an array containing the command name & its parameters 
+ * */ + protected abstract String[] getExecString(); + + /** + * Parse the execution result + * */ + protected abstract void parseExecResult(BufferedReader lines) + throws IOException; + + /** + * get the current sub-process executing the given command + * @return process executing the command + */ + public Process getProcess() { + return process; + } + + /** get the exit code + * @return the exit code of the process + */ + public int getExitCode() { + return exitCode; + } + + /** + * Set if the command has timed out. + * + */ + private void setTimedOut() { + this.timedOut.set(true); + } + + + + /** + * Timer which is used to timeout scripts spawned off by shell. + */ + private static class ShellTimeoutTimerTask extends TimerTask { + + private AbstractShell shell; + + public ShellTimeoutTimerTask(AbstractShell shell) { + this.shell = shell; + } + + @Override + public void run() { + Process p = shell.getProcess(); + try { + p.exitValue(); + } catch (Exception e) { + //Process has not terminated. + //So check if it has completed + //if not just destroy it. + if (p != null && !shell.completed.get()) { + shell.setTimedOut(); + p.destroy(); + } + } + } + } + + /** + * This is an IOException with exit code added. 
+ */ + public static class ExitCodeException extends IOException { + int exitCode; + + public ExitCodeException(int exitCode, String message) { + super(message); + this.exitCode = exitCode; + } + + public int getExitCode() { + return exitCode; + } + } + + /** + * process manage container + * + */ + public static class ProcessContainer extends ConcurrentHashMap{ + private static final ProcessContainer container = new ProcessContainer(); + private ProcessContainer(){ + super(); + } + public static final ProcessContainer getInstance(){ + return container; + } + + public static void putProcess(Process process){ + getInstance().put(process.hashCode(), process); + } + public static int processSize(){ + return getInstance().size(); + } + + public static void removeProcess(Process process){ + getInstance().remove(process.hashCode()); + } + + public static void destroyAllProcess(){ + Set> set = getInstance().entrySet(); + for (Entry entry : set) { + try{ + entry.getValue().destroy(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + logger.info("close " + set.size() + " executing process tasks"); + } + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/ShellExecutor.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/ShellExecutor.java new file mode 100644 index 0000000000..832bcf6c9f --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/ShellExecutor.java @@ -0,0 +1,175 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.shell; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.util.Map; + +/** + * shell command executor. + * + * ShellExecutor should be used in cases where the output + * of the command needs no explicit parsing and where the command, working + * directory and the environment remains unchanged. The output of the command + * is stored as-is and is expected to be small. + */ +public class ShellExecutor extends AbstractShell { + + private String[] command; + private StringBuffer output; + + + public ShellExecutor(String... execString) { + this(execString, null); + } + + public ShellExecutor(String[] execString, File dir) { + this(execString, dir, null); + } + + public ShellExecutor(String[] execString, File dir, + Map env) { + this(execString, dir, env , 0L); + } + + /** + * Create a new instance of the ShellExecutor to execute a command. + * + * @param execString The command to execute with arguments + * @param dir If not-null, specifies the directory which should be set + * as the current working directory for the command. + * If null, the current working directory is not modified. + * @param env If not-null, environment of the command will include the + * key-value pairs specified in the map. If null, the current + * environment is not modified. + * @param timeout Specifies the time in milliseconds, after which the + * command will be killed and the status marked as timedout. + * If 0, the command will not be timed out. 
+ */ + public ShellExecutor(String[] execString, File dir, + Map env, long timeout) { + command = execString.clone(); + if (dir != null) { + setWorkingDirectory(dir); + } + if (env != null) { + setEnvironment(env); + } + timeOutInterval = timeout; + } + + + /** + * Static method to execute a shell command. + * Covers most of the simple cases without requiring the user to implement + * the AbstractShell interface. + * @param cmd shell command to execute. + * @return the output of the executed command. + */ + public static String execCommand(String... cmd) throws IOException { + return execCommand(null, cmd, 0L); + } + + /** + * Static method to execute a shell command. + * Covers most of the simple cases without requiring the user to implement + * the AbstractShell interface. + * @param env the map of environment key=value + * @param cmd shell command to execute. + * @param timeout time in milliseconds after which script should be marked timeout + * @return the output of the executed command.o + */ + + public static String execCommand(Map env, String[] cmd, + long timeout) throws IOException { + ShellExecutor exec = new ShellExecutor(cmd, null, env, + timeout); + exec.execute(); + return exec.getOutput(); + } + + /** + * Static method to execute a shell command. + * Covers most of the simple cases without requiring the user to implement + * the AbstractShell interface. + * @param env the map of environment key=value + * @param cmd shell command to execute. + * @return the output of the executed command. + */ + public static String execCommand(Map env, String ... 
cmd) + throws IOException { + return execCommand(env, cmd, 0L); + } + + /** + * Execute the shell command + * + */ + public void execute() throws IOException { + this.run(); + } + + @Override + protected String[] getExecString() { + return command; + } + + @Override + protected void parseExecResult(BufferedReader lines) throws IOException { + output = new StringBuffer(); + char[] buf = new char[1024]; + int nRead; + String line = ""; + while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) { + line = new String(buf,0,nRead); + } + output.append(line); + } + + /** + * + * Get the output of the shell command + */ + public String getOutput() { + return (output == null) ? "" : output.toString(); + } + + + /** + * Returns the commands of this instance. + * Arguments with spaces in are presented with quotes round; other + * arguments are presented raw + * + * @return a string representation of the object + */ + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + String[] args = getExecString(); + for (String s : args) { + if (s.indexOf(' ') >= 0) { + builder.append('"').append(s).append('"'); + } else { + builder.append(s); + } + builder.append(' '); + } + return builder.toString(); + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java new file mode 100644 index 0000000000..2d0322a6d7 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task; + +import org.apache.dolphinscheduler.common.process.Property; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + * job params related class + */ +public abstract class AbstractParameters implements IParameters { + + @Override + public abstract boolean checkParameters(); + + @Override + public abstract List getResourceFilesList(); + + /** + * local parameters + */ + public List localParams; + + /** + * get local parameters list + * @return + */ + public List getLocalParams() { + return localParams; + } + + public void setLocalParams(List localParams) { + this.localParams = localParams; + } + + /** + * get local parameters map + * @return + */ + public Map getLocalParametersMap() { + if (localParams != null) { + Map localParametersMaps = new LinkedHashMap<>(); + + for (Property property : localParams) { + localParametersMaps.put(property.getProp(),property); + } + return localParametersMaps; + } + return null; + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/IParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/IParameters.java new file mode 100644 index 0000000000..8fb49eb1fa --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/IParameters.java @@ -0,0 +1,38 
@@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task; + +import java.util.List; + +/** + * job params interface + */ +public interface IParameters { + /** + * check parameters is valid + * + * @return + */ + boolean checkParameters(); + + /** + * get project resource files list + * + * @return resource files list + */ + List getResourceFilesList(); +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/TaskTimeoutParameter.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/TaskTimeoutParameter.java new file mode 100644 index 0000000000..245b1358a6 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/TaskTimeoutParameter.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task; + +import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; + +/** + * task timeout parameter + */ +public class TaskTimeoutParameter { + + private boolean enable; + /** + * task timeout strategy + */ + private TaskTimeoutStrategy strategy; + /** + * task timeout interval + */ + private int interval; + + public boolean getEnable() { + return enable; + } + + public void setEnable(boolean enable) { + this.enable = enable; + } + + public TaskTimeoutStrategy getStrategy() { + return strategy; + } + + public void setStrategy(TaskTimeoutStrategy strategy) { + this.strategy = strategy; + } + + public int getInterval() { + return interval; + } + + public void setInterval(int interval) { + this.interval = interval; + } + + public TaskTimeoutParameter() { + } + + public TaskTimeoutParameter(boolean enable) { + this.enable = enable; + } + + public TaskTimeoutParameter(boolean enable, TaskTimeoutStrategy strategy, int interval) { + this.enable = enable; + this.strategy = strategy; + this.interval = interval; + } + + @Override + public String toString() { + return "TaskTimeoutParameter{" + + "enable=" + enable + + ", strategy=" + strategy + + ", interval=" + interval + + '}'; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java new file mode 100644 index 0000000000..9ff1405722 --- /dev/null +++ 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.dependent; + +import org.apache.dolphinscheduler.common.enums.DependentRelation; +import org.apache.dolphinscheduler.common.model.DependentTaskModel; +import org.apache.dolphinscheduler.common.task.AbstractParameters; + +import java.util.ArrayList; +import java.util.List; + +public class DependentParameters extends AbstractParameters { + + private List dependTaskList; + private DependentRelation relation; + + + + @Override + public boolean checkParameters() { + return true; + } + + @Override + public List getResourceFilesList() { + return new ArrayList<>(); + } + + public List getDependTaskList() { + return dependTaskList; + } + + public void setDependTaskList(List dependTaskList) { + this.dependTaskList = dependTaskList; + } + + public DependentRelation getRelation() { + return relation; + } + + public void setRelation(DependentRelation relation) { + this.relation = relation; + } +} diff --git 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java new file mode 100644 index 0000000000..0638b3858e --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java @@ -0,0 +1,219 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.task.flink; + +import org.apache.dolphinscheduler.common.enums.ProgramType; +import org.apache.dolphinscheduler.common.process.ResourceInfo; +import org.apache.dolphinscheduler.common.task.AbstractParameters; + +import java.util.List; +import java.util.stream.Collectors; + +/** + * spark parameters + */ +public class FlinkParameters extends AbstractParameters { + + /** + * major jar + */ + private ResourceInfo mainJar; + + /** + * major class + */ + private String mainClass; + + /** + * deploy mode yarn-cluster yarn-client yarn-local + */ + private String deployMode; + + /** + * arguments + */ + private String mainArgs; + + /** + * slot个数 + */ + private int slot; + + /** + *Yarn application的名字 + */ + + private String appName; + + /** + * taskManager 数量 + */ + private int taskManager; + + /** + * jobManagerMemory 内存大小 + */ + private String jobManagerMemory ; + + /** + * taskManagerMemory内存大小 + */ + private String taskManagerMemory; + + /** + * resource list + */ + private List resourceList; + + /** + * The YARN queue to submit to + */ + private String queue; + + /** + * other arguments + */ + private String others; + + /** + * program type + * 0 JAVA,1 SCALA,2 PYTHON + */ + private ProgramType programType; + + public ResourceInfo getMainJar() { + return mainJar; + } + + public void setMainJar(ResourceInfo mainJar) { + this.mainJar = mainJar; + } + + public String getMainClass() { + return mainClass; + } + + public void setMainClass(String mainClass) { + this.mainClass = mainClass; + } + + public String getDeployMode() { + return deployMode; + } + + public void setDeployMode(String deployMode) { + this.deployMode = deployMode; + } + + public String getMainArgs() { + return mainArgs; + } + + public void setMainArgs(String mainArgs) { + this.mainArgs = mainArgs; + } + + public int getSlot() { + return slot; + } + + public void setSlot(int slot) { + this.slot = slot; + } + + public String getAppName() { + return appName; 
+ } + + public void setAppName(String appName) { + this.appName = appName; + } + + public int getTaskManager() { + return taskManager; + } + + public void setTaskManager(int taskManager) { + this.taskManager = taskManager; + } + + public String getJobManagerMemory() { + return jobManagerMemory; + } + + public void setJobManagerMemory(String jobManagerMemory) { + this.jobManagerMemory = jobManagerMemory; + } + + public String getTaskManagerMemory() { + return taskManagerMemory; + } + + public void setTaskManagerMemory(String taskManagerMemory) { + this.taskManagerMemory = taskManagerMemory; + } + + public String getQueue() { + return queue; + } + + public void setQueue(String queue) { + this.queue = queue; + } + + public List getResourceList() { + return resourceList; + } + + public void setResourceList(List resourceList) { + this.resourceList = resourceList; + } + + public String getOthers() { + return others; + } + + public void setOthers(String others) { + this.others = others; + } + + public ProgramType getProgramType() { + return programType; + } + + public void setProgramType(ProgramType programType) { + this.programType = programType; + } + + @Override + public boolean checkParameters() { + return mainJar != null && programType != null; + } + + + @Override + public List getResourceFilesList() { + if(resourceList !=null ) { + this.resourceList.add(mainJar); + return resourceList.stream() + .map(p -> p.getRes()).collect(Collectors.toList()); + } + return null; + } + + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/http/HttpParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/http/HttpParameters.java new file mode 100644 index 0000000000..00b01afce3 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/http/HttpParameters.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.http; + +import org.apache.dolphinscheduler.common.enums.HttpCheckCondition; +import org.apache.dolphinscheduler.common.enums.HttpMethod; +import org.apache.dolphinscheduler.common.process.HttpProperty; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.commons.lang.StringUtils; + +import java.util.ArrayList; +import java.util.List; + +/** + * http parameter + */ +public class HttpParameters extends AbstractParameters { + /** + * url + */ + private String url; + + /** + * httpMethod + */ + private HttpMethod httpMethod; + + /** + * http params + */ + private List httpParams; + + /** + * httpCheckCondition + */ + private HttpCheckCondition httpCheckCondition = HttpCheckCondition.STATUS_CODE_DEFAULT; + + /** + * condition + */ + private String condition; + + + + @Override + public boolean checkParameters() { + return StringUtils.isNotEmpty(url); + } + + @Override + public List getResourceFilesList() { + return new ArrayList<>(); + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public HttpMethod getHttpMethod() { + return httpMethod; + } + + public void setHttpMethod(HttpMethod httpMethod) { + 
this.httpMethod = httpMethod; + } + + public List getHttpParams() { + return httpParams; + } + + public void setHttpParams(List httpParams) { + this.httpParams = httpParams; + } + + public HttpCheckCondition getHttpCheckCondition() { + return httpCheckCondition; + } + + public void setHttpCheckCondition(HttpCheckCondition httpCheckCondition) { + this.httpCheckCondition = httpCheckCondition; + } + + public String getCondition() { + return condition; + } + + public void setCondition(String condition) { + this.condition = condition; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java new file mode 100644 index 0000000000..b8fd6ebcbf --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java @@ -0,0 +1,145 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.task.mr; + +import org.apache.dolphinscheduler.common.enums.ProgramType; +import org.apache.dolphinscheduler.common.process.ResourceInfo; +import org.apache.dolphinscheduler.common.task.AbstractParameters; + +import java.util.List; +import java.util.stream.Collectors; + +public class MapreduceParameters extends AbstractParameters { + + /** + * major jar + */ + private ResourceInfo mainJar; + + /** + * major class + */ + private String mainClass; + + /** + * arguments + */ + private String mainArgs; + + /** + * other arguments + */ + private String others; + + /** + * queue + */ + private String queue; + + /** + * resource list + */ + private List resourceList; + + /** + * program type + * 0 JAVA,1 SCALA,2 PYTHON + */ + private ProgramType programType; + + + public String getMainClass() { + return mainClass; + } + + public void setMainClass(String mainClass) { + this.mainClass = mainClass; + } + + public String getMainArgs() { + return mainArgs; + } + + public void setMainArgs(String mainArgs) { + this.mainArgs = mainArgs; + } + + public String getOthers() { + return others; + } + + public void setOthers(String others) { + this.others = others; + } + + public String getQueue() { + return queue; + } + + public void setQueue(String queue) { + this.queue = queue; + } + + public List getResourceList() { + return this.resourceList; + } + + public void setResourceList(List resourceList) { + this.resourceList = resourceList; + } + + public void setMainJar(ResourceInfo mainJar) { + this.mainJar = mainJar; + } + + public ResourceInfo getMainJar() { + return mainJar; + } + + public ProgramType getProgramType() { + return programType; + } + + public void setProgramType(ProgramType programType) { + this.programType = programType; + } + + @Override + public boolean checkParameters() { + return this.mainJar != null && this.programType != null; + } + + @Override + public List getResourceFilesList() { + if (resourceList != null) { + 
this.resourceList.add(mainJar); + return resourceList.stream() + .map(p -> p.getRes()).collect(Collectors.toList()); + } + return null; + } + + @Override + public String toString() { + return "mainJar= " + mainJar + + "mainClass=" + mainClass + + "mainArgs=" + mainArgs + + "queue=" + queue + + "other mainArgs=" + others + ; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/procedure/ProcedureParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/procedure/ProcedureParameters.java new file mode 100644 index 0000000000..56ae65547d --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/procedure/ProcedureParameters.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.procedure; + +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.commons.lang.StringUtils; + +import java.util.ArrayList; +import java.util.List; + + +/** + * procedure parameter + */ +public class ProcedureParameters extends AbstractParameters { + + /** + * data source type,eg MYSQL, POSTGRES, HIVE ... 
+ */ + private String type; + + /** + * data source id + */ + private int datasource; + + /** + * procedure name + */ + private String method; + + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public int getDatasource() { + return datasource; + } + + public void setDatasource(int datasource) { + this.datasource = datasource; + } + + public String getMethod() { + return method; + } + + public void setMethod(String method) { + this.method = method; + } + + @Override + public boolean checkParameters() { + return datasource != 0 && StringUtils.isNotEmpty(type) && StringUtils.isNotEmpty(method); + } + + @Override + public List getResourceFilesList() { + return new ArrayList<>(); + } + + @Override + public String toString() { + return "ProcessdureParam{" + + "type='" + type + '\'' + + ", datasource=" + datasource + + ", method='" + method + '\'' + + '}'; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/python/PythonParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/python/PythonParameters.java new file mode 100644 index 0000000000..ae9cb4c7da --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/python/PythonParameters.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.python; + + +import org.apache.dolphinscheduler.common.process.ResourceInfo; +import org.apache.dolphinscheduler.common.task.AbstractParameters; + +import java.util.List; +import java.util.stream.Collectors; + +public class PythonParameters extends AbstractParameters { + /** + * origin python script + */ + private String rawScript; + + /** + * resource list + */ + private List resourceList; + + public String getRawScript() { + return rawScript; + } + + public void setRawScript(String rawScript) { + this.rawScript = rawScript; + } + + public List getResourceList() { + return resourceList; + } + + public void setResourceList(List resourceList) { + this.resourceList = resourceList; + } + + @Override + public boolean checkParameters() { + return rawScript != null && !rawScript.isEmpty(); + } + + @Override + public List getResourceFilesList() { + if (resourceList != null) { + return resourceList.stream() + .map(p -> p.getRes()).collect(Collectors.toList()); + } + + return null; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/shell/ShellParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/shell/ShellParameters.java new file mode 100644 index 0000000000..85b8acb46a --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/shell/ShellParameters.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.shell; + + +import org.apache.dolphinscheduler.common.process.ResourceInfo; +import org.apache.dolphinscheduler.common.task.AbstractParameters; + +import java.util.List; +import java.util.stream.Collectors; + +/** + * shell parameters + */ +public class ShellParameters extends AbstractParameters { + /** + * shell script + */ + private String rawScript; + + /** + * resource list + */ + private List resourceList; + + public String getRawScript() { + return rawScript; + } + + public void setRawScript(String rawScript) { + this.rawScript = rawScript; + } + + public List getResourceList() { + return resourceList; + } + + public void setResourceList(List resourceList) { + this.resourceList = resourceList; + } + + @Override + public boolean checkParameters() { + return rawScript != null && !rawScript.isEmpty(); + } + + @Override + public List getResourceFilesList() { + if (resourceList != null) { + return resourceList.stream() + .map(p -> p.getRes()).collect(Collectors.toList()); + } + + return null; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java new file 
mode 100644 index 0000000000..41263f0a74 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java @@ -0,0 +1,220 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.spark; + +import org.apache.dolphinscheduler.common.enums.ProgramType; +import org.apache.dolphinscheduler.common.process.ResourceInfo; +import org.apache.dolphinscheduler.common.task.AbstractParameters; + +import java.util.List; +import java.util.stream.Collectors; + +/** + * spark parameters + */ +public class SparkParameters extends AbstractParameters { + + /** + * major jar + */ + private ResourceInfo mainJar; + + /** + * major class + */ + private String mainClass; + + /** + * deploy mode + */ + private String deployMode; + + /** + * arguments + */ + private String mainArgs; + + /** + * driver-cores Number of cores used by the driver, only in cluster mode + */ + private int driverCores; + + /** + * driver-memory Memory for driver + */ + + private String driverMemory; + + /** + * num-executors Number of executors to launch + */ + private int numExecutors; + + /** + * executor-cores Number of cores per executor + */ + private int 
executorCores; + + /** + * Memory per executor + */ + private String executorMemory; + + /** + * resource list + */ + private List resourceList; + + /** + * The YARN queue to submit to + */ + private String queue; + + /** + * other arguments + */ + private String others; + + /** + * program type + * 0 JAVA,1 SCALA,2 PYTHON + */ + private ProgramType programType; + + public ResourceInfo getMainJar() { + return mainJar; + } + + public void setMainJar(ResourceInfo mainJar) { + this.mainJar = mainJar; + } + + public String getMainClass() { + return mainClass; + } + + public void setMainClass(String mainClass) { + this.mainClass = mainClass; + } + + public String getDeployMode() { + return deployMode; + } + + public void setDeployMode(String deployMode) { + this.deployMode = deployMode; + } + + public String getMainArgs() { + return mainArgs; + } + + public void setMainArgs(String mainArgs) { + this.mainArgs = mainArgs; + } + + public int getDriverCores() { + return driverCores; + } + + public void setDriverCores(int driverCores) { + this.driverCores = driverCores; + } + + public String getDriverMemory() { + return driverMemory; + } + + public void setDriverMemory(String driverMemory) { + this.driverMemory = driverMemory; + } + + public int getNumExecutors() { + return numExecutors; + } + + public void setNumExecutors(int numExecutors) { + this.numExecutors = numExecutors; + } + + public int getExecutorCores() { + return executorCores; + } + + public void setExecutorCores(int executorCores) { + this.executorCores = executorCores; + } + + public String getExecutorMemory() { + return executorMemory; + } + + public void setExecutorMemory(String executorMemory) { + this.executorMemory = executorMemory; + } + + + public String getQueue() { + return queue; + } + + public void setQueue(String queue) { + this.queue = queue; + } + + public List getResourceList() { + return resourceList; + } + + public void setResourceList(List resourceList) { + this.resourceList = resourceList; 
+ } + + public String getOthers() { + return others; + } + + public void setOthers(String others) { + this.others = others; + } + + public ProgramType getProgramType() { + return programType; + } + + public void setProgramType(ProgramType programType) { + this.programType = programType; + } + + @Override + public boolean checkParameters() { + return mainJar != null && programType != null; + } + + + @Override + public List getResourceFilesList() { + if(resourceList !=null ) { + this.resourceList.add(mainJar); + return resourceList.stream() + .map(p -> p.getRes()).collect(Collectors.toList()); + } + return null; + } + + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlBinds.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlBinds.java new file mode 100644 index 0000000000..1096185f45 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlBinds.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.task.sql; + +import org.apache.dolphinscheduler.common.process.Property; + +import java.util.Map; + +/** + * Used to contains both prepared sql string and its to-be-bind parameters + */ +public class SqlBinds { + private final String sql; + private final Map paramsMap; + + public SqlBinds(String sql, Map paramsMap) { + this.sql = sql; + this.paramsMap = paramsMap; + } + + public String getSql() { + return sql; + } + + public Map getParamsMap() { + return paramsMap; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java new file mode 100644 index 0000000000..d65204a386 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java @@ -0,0 +1,213 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.task.sql; + +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.commons.lang.StringUtils; + +import java.util.ArrayList; +import java.util.List; + +/** + * Sql/Hql parameter + */ +public class SqlParameters extends AbstractParameters { + /** + * data source type,eg MYSQL, POSTGRES, HIVE ... + */ + private String type; + + /** + * datasource id + */ + private int datasource; + + /** + * sql + */ + private String sql; + + /** + * sql type + * 0 query + * 1 NON_QUERY + */ + private int sqlType; + + /** + * udf list + */ + private String udfs; + /** + * show type + * 0 TABLE + * 1 TEXT + * 2 attachment + * 3 TABLE+attachment + */ + private String showType; + /** + * SQL connection parameters + */ + private String connParams; + /** + * Pre Statements + */ + private List preStatements; + /** + * Post Statements + */ + private List postStatements; + + /** + * title + */ + private String title; + + /** + * receivers + */ + private String receivers; + + /** + * receivers cc + */ + private String receiversCc; + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public int getDatasource() { + return datasource; + } + + public void setDatasource(int datasource) { + this.datasource = datasource; + } + + public String getSql() { + return sql; + } + + public void setSql(String sql) { + this.sql = sql; + } + + public String getUdfs() { + return udfs; + } + + public void setUdfs(String udfs) { + this.udfs = udfs; + } + + public int getSqlType() { + return sqlType; + } + + public void setSqlType(int sqlType) { + this.sqlType = sqlType; + } + + public String getShowType() { + return showType; + } + + public void setShowType(String showType) { + this.showType = showType; + } + + public String getConnParams() { + return connParams; + } + + public void setConnParams(String connParams) { + this.connParams = connParams; + } + + public String 
getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getReceivers() { + return receivers; + } + + public void setReceivers(String receivers) { + this.receivers = receivers; + } + + public String getReceiversCc() { + return receiversCc; + } + + public void setReceiversCc(String receiversCc) { + this.receiversCc = receiversCc; + } + public List getPreStatements() { + return preStatements; + } + + public void setPreStatements(List preStatements) { + this.preStatements = preStatements; + } + + public List getPostStatements() { + return postStatements; + } + + public void setPostStatements(List postStatements) { + this.postStatements = postStatements; + } + + @Override + public boolean checkParameters() { + return datasource != 0 && StringUtils.isNotEmpty(type) && StringUtils.isNotEmpty(sql); + } + + @Override + public List getResourceFilesList() { + return new ArrayList<>(); + } + + @Override + public String toString() { + return "SqlParameters{" + + "type='" + type + '\'' + + ", datasource=" + datasource + + ", sql='" + sql + '\'' + + ", sqlType=" + sqlType + + ", udfs='" + udfs + '\'' + + ", showType='" + showType + '\'' + + ", connParams='" + connParams + '\'' + + ", title='" + title + '\'' + + ", receivers='" + receivers + '\'' + + ", receiversCc='" + receiversCc + '\'' + + ", preStatements=" + preStatements + + ", postStatements=" + postStatements + + '}'; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlType.java new file mode 100644 index 0000000000..871900a742 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlType.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.sql; + + +public enum SqlType { + /** + * sql type + * 0 query + * 1 NON_QUERY + */ + QUERY, NON_QUERY +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java new file mode 100644 index 0000000000..21e3ce2248 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.subprocess; +import org.apache.dolphinscheduler.common.task.AbstractParameters; + +import java.util.ArrayList; +import java.util.List; + + +public class SubProcessParameters extends AbstractParameters { + + /** + * process definition id + */ + private Integer processDefinitionId; + + public void setProcessDefinitionId(Integer processDefinitionId){ + this.processDefinitionId = processDefinitionId; + } + + public Integer getProcessDefinitionId(){ + return this.processDefinitionId; + } + + @Override + public boolean checkParameters() { + return this.processDefinitionId != 0; + } + + @Override + public List getResourceFilesList() { + return new ArrayList<>(); + } +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/Stopper.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/Stopper.java new file mode 100644 index 0000000000..1c58f879a0 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/Stopper.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.thread; + +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * if the process closes, a signal is placed as true, and all threads get this flag to stop working + */ +public class Stopper { + + private static volatile AtomicBoolean signal = new AtomicBoolean(false); + + public static final boolean isStoped(){ + return signal.get(); + } + + public static final boolean isRunning(){ + return !signal.get(); + } + + public static final void stop(){ + signal.getAndSet(true); + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadPoolExecutors.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadPoolExecutors.java new file mode 100644 index 0000000000..f88ea6d127 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadPoolExecutors.java @@ -0,0 +1,310 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/**
 * Process-wide singleton wrapper around a single tracking thread pool.
 *
 * The first call to {@link #getInstance(String, int)} creates both the
 * singleton and its backing {@link Executor}; later calls return the same
 * instance regardless of the arguments passed.
 */
public class ThreadPoolExecutors {

    private static final Logger logger = LoggerFactory.getLogger(ThreadPoolExecutors.class);

    // the single backing executor, created lazily inside getInstance().
    // NOTE(review): this field is read outside the synchronized block in
    // getInstance() without volatile — confirm the double-checked locking
    // here is acceptable for the intended callers.
    private static Executor executor;
    private static volatile ThreadPoolExecutors threadPoolExecutors;

    private ThreadPoolExecutors(){}


    /**
     * Singleton accessor using the default pool name ("thread_pool") and
     * default size (0 → availableProcessors * 3).
     */
    public static ThreadPoolExecutors getInstance(){
        return getInstance("thread_pool",0);
    }

    /**
     * Singleton accessor.
     *
     * NOTE(review): name/maxThreads only take effect on the very first call;
     * subsequent calls with different arguments silently return the pool
     * configured by the first caller — confirm this is intended.
     *
     * @param name thread name prefix (falls back to "thread_pool" when null)
     * @param maxThreads pool size; 0 means availableProcessors * 3
     */
    public static ThreadPoolExecutors getInstance(String name, int maxThreads){

        if (null == threadPoolExecutors) {

            synchronized (ThreadPoolExecutors.class) {

                if(null == threadPoolExecutors) {
                    threadPoolExecutors = new ThreadPoolExecutors();
                }
                if(null == executor) {
                    executor = new Executor(null == name? "thread_pool" : name, maxThreads == 0? Runtime.getRuntime().availableProcessors() * 3 : maxThreads);
                }
            }
        }

        return threadPoolExecutors;
    }

    /**
     * Executes the given task sometime in the future. The task may execute in a new thread or in an existing pooled thread.
     * If the task cannot be submitted for execution, either because this executor has been shutdown or because its capacity has been reached,
     * the task is handled by the current RejectedExecutionHandler.
     *
     * @param event task to run; logged and dropped when the executor is missing
     */
    public void execute(final Runnable event) {
        Executor executor = getExecutor();
        if (executor == null) {
            logger.error("Cannot execute [" + event + "] because the executor is missing.");
        } else {
            executor.execute(event);
        }
    }


    /**
     * Submit a runnable to the pool.
     *
     * @return a Future for the task, or null when the executor is missing
     *         (NOTE(review): callers must null-check — confirm this contract)
     */
    public Future<?> submit(Runnable event) {
        Executor executor = getExecutor();
        if (executor == null) {
            logger.error("Cannot submit [" + event + "] because the executor is missing.");
        } else {
            return executor.submit(event);
        }

        return null;

    }


    /**
     * Submit a value-returning task to the pool.
     *
     * @return a Future for the task, or null when the executor is missing
     */
    public <T> Future<T> submit(Callable<T> task) {
        Executor executor = getExecutor();
        if (executor == null) {
            logger.error("Cannot submit [" + task + "] because the executor is missing.");
        } else {
            return executor.submit(task);
        }

        return null;
    }


    /**
     * Dump queued and running tasks of the backing executor to stdout.
     */
    public void printStatus() {
        Executor executor = getExecutor();
        executor.getStatus().dumpInfo();
    }


    private Executor getExecutor() {
        return executor;
    }


    /**
     * Shut the pool down immediately (shutdownNow), logging any tasks that
     * were still pending.
     */
    public void shutdown() {
        if (executor != null) {
            List<Runnable> wasRunning = executor.threadPoolExecutor
                    .shutdownNow();
            if (!wasRunning.isEmpty()) {
                logger.info(executor + " had " + wasRunning + " on shutdown");
            }
        }
    }


    /**
     * Executor instance: a named, fixed-size tracking pool over an unbounded
     * LinkedBlockingQueue.
     */
    private static class Executor {
        /**
         * how long to retain excess threads
         */
        final long keepAliveTimeInMillis = 1000;
        /**
         * the thread pool executor that services the requests
         */
        final TrackingThreadPoolExecutor threadPoolExecutor;
        /**
         * work queue to use - unbounded queue
         */
        final BlockingQueue<Runnable> q = new LinkedBlockingQueue<Runnable>();
        private final String name;
        // monotonically increasing id, shared across all Executor instances
        private static final AtomicLong seqids = new AtomicLong(0);
        private final long id;

        protected Executor(String name, int maxThreads) {
            this.id = seqids.incrementAndGet();
            this.name = name;
            //create the thread pool executor (core == max, so effectively fixed-size)
            this.threadPoolExecutor = new TrackingThreadPoolExecutor(
                    maxThreads, maxThreads, keepAliveTimeInMillis,
                    TimeUnit.MILLISECONDS, q);
            // name the threads for this threadpool, e.g. "<name>-0", "<name>-1", ...
            ThreadFactoryBuilder tfb = new ThreadFactoryBuilder();
            tfb.setNameFormat(this.name + "-%d");
            this.threadPoolExecutor.setThreadFactory(tfb.build());
        }

        /**
         * Submit the event to the queue for handling.
         *
         * @param event
         */
        void execute(final Runnable event) {
            this.threadPoolExecutor.execute(event);
        }

        Future<?> submit(Runnable event) {
            return this.threadPoolExecutor.submit(event);
        }

        <T> Future<T> submit(Callable<T> event) {
            return this.threadPoolExecutor.submit(event);
        }


        @Override
        public String toString() {
            return getClass().getSimpleName() + "-" + id + "-" + name;
        }

        /**
         * Snapshot the queue and the currently-running tasks.
         * NOTE(review): iteration over q and the running-task map is not
         * atomic; the snapshot is best-effort, not consistent.
         */
        public ExecutorStatus getStatus() {
            List<Runnable> queuedEvents = Lists.newArrayList();
            for (Runnable r : q) {
                queuedEvents.add(r);
            }

            List<RunningEventStatus> running = Lists.newArrayList();
            for (Map.Entry<Thread, Runnable> e : threadPoolExecutor
                    .getRunningTasks().entrySet()) {
                Runnable r = e.getValue();
                running.add(new RunningEventStatus(e.getKey(), r));
            }

            return new ExecutorStatus(this, queuedEvents, running);
        }
    }


    /**
     * A subclass of ThreadPoolExecutor that keeps track of the Runnables that
     * are executing at any given point in time.
     */
    static class TrackingThreadPoolExecutor extends ThreadPoolExecutor {
        // worker thread -> the task it is currently running
        private ConcurrentMap<Thread, Runnable> running = Maps
                .newConcurrentMap();

        public TrackingThreadPoolExecutor(int corePoolSize,
                int maximumPoolSize, long keepAliveTime, TimeUnit unit,
                BlockingQueue<Runnable> workQueue) {
            super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue);
        }

        @Override
        protected void afterExecute(Runnable r, Throwable t) {
            super.afterExecute(r, t);
            // afterExecute runs on the worker thread itself, so currentThread
            // is the correct key to clear
            running.remove(Thread.currentThread());
        }

        @Override
        protected void beforeExecute(Thread t, Runnable r) {
            Runnable oldPut = running.put(t, r);
            // a thread can only run one task at a time; checked only when
            // assertions are enabled (-ea)
            assert oldPut == null : "inconsistency for thread " + t;
            super.beforeExecute(t, r);
        }

        /**
         * @return a map of the threads currently running tasks inside this
         *         executor. Each key is an active thread, and the value is the
         *         task that is currently running. Note that this is not a
         *         stable snapshot of the map.
         */
        public ConcurrentMap<Thread, Runnable> getRunningTasks() {
            return running;
        }
    }


    /**
     * A snapshot of the status of a particular executor. This includes the
     * contents of the executor's pending queue, as well as the threads and
     * events currently being processed.
     *
     * This is a consistent snapshot that is immutable once constructed.
     */
    public static class ExecutorStatus {
        final Executor executor;
        final List<Runnable> queuedEvents;
        final List<RunningEventStatus> running;

        ExecutorStatus(Executor executor, List<Runnable> queuedEvents,
                List<RunningEventStatus> running) {
            this.executor = executor;
            this.queuedEvents = queuedEvents;
            this.running = running;
        }

        /**
         * Write a human-readable report of this snapshot to stdout.
         */
        public void dumpInfo() {

            PrintWriter out = new PrintWriter(System.out);

            out.write("Status for executor: " + executor + "\n");
            out.write("=======================================\n");
            out.write(queuedEvents.size() + " events queued, "
                    + running.size() + " running\n");
            if (!queuedEvents.isEmpty()) {
                out.write("Queued:\n");
                for (Runnable e : queuedEvents) {
                    out.write("  " + e + "\n");
                }
                out.write("\n");
            }
            if (!running.isEmpty()) {
                out.write("Running:\n");
                for (RunningEventStatus stat : running) {
                    out.write("  Running on thread '"
                            + stat.threadInfo.getThreadName() + "': "
                            + stat.event + "\n");
                    out.write(ThreadUtils.formatThreadInfo(
                            stat.threadInfo, "  "));
                    out.write("\n");
                }
            }
            out.flush();
        }
    }


    /**
     * The status of a particular event that is in the middle of being handled
     * by an executor.
     */
    public static class RunningEventStatus {
        final ThreadInfo threadInfo;
        final Runnable event;

        public RunningEventStatus(Thread t, Runnable event) {
            this.threadInfo = ThreadUtils.getThreadInfo(t);
            this.event = event;
        }
    }
}
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.thread; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; + +import java.lang.management.ManagementFactory; +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; +import java.util.concurrent.*; + +/** + * thread utils + */ +public class ThreadUtils { + + + private static final ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); + private static final int STACK_DEPTH = 20; + + /** + Wrapper over newCachedThreadPool. Thread names are formatted as prefix-ID, where ID is a + * unique, sequentially assigned integer. + * @param prefix + * @return + */ + public static ThreadPoolExecutor newDaemonCachedThreadPool(String prefix){ + ThreadFactory threadFactory = namedThreadFactory(prefix); + return ((ThreadPoolExecutor) Executors.newCachedThreadPool(threadFactory)); + } + + /** + * Create a thread factory that names threads with a prefix and also sets the threads to daemon. + * @param prefix + * @return + */ + private static ThreadFactory namedThreadFactory(String prefix) { + return new ThreadFactoryBuilder().setDaemon(true).setNameFormat(prefix + "-%d").build(); + } + + + /** + * Create a cached thread pool whose max number of threads is `maxThreadNumber`. 
Thread names + * are formatted as prefix-ID, where ID is a unique, sequentially assigned integer. + * @param prefix + * @param maxThreadNumber + * @param keepAliveSeconds + * @return + */ + public static ThreadPoolExecutor newDaemonCachedThreadPool(String prefix , + int maxThreadNumber, + int keepAliveSeconds){ + ThreadFactory threadFactory = namedThreadFactory(prefix); + ThreadPoolExecutor threadPool = new ThreadPoolExecutor( + // corePoolSize: the max number of threads to create before queuing the tasks + maxThreadNumber, + // maximumPoolSize: because we use LinkedBlockingDeque, this one is not used + maxThreadNumber, + keepAliveSeconds, + TimeUnit.SECONDS, + new LinkedBlockingQueue(), + threadFactory); + threadPool.allowCoreThreadTimeOut(true); + return threadPool; + } + + + /** + * Wrapper over newFixedThreadPool. Thread names are formatted as prefix-ID, where ID is a + * unique, sequentially assigned integer. + * @param nThreads + * @param prefix + * @return + */ + public static ThreadPoolExecutor newDaemonFixedThreadPool(int nThreads , String prefix){ + ThreadFactory threadFactory = namedThreadFactory(prefix); + return ((ThreadPoolExecutor) Executors.newFixedThreadPool(nThreads, threadFactory)); + } + + /** + * Wrapper over newSingleThreadExecutor. + * @param threadName + * @return + */ + public static ExecutorService newDaemonSingleThreadExecutor(String threadName){ + ThreadFactory threadFactory = new ThreadFactoryBuilder() + .setDaemon(true) + .setNameFormat(threadName) + .build(); + return Executors.newSingleThreadExecutor(threadFactory); + } + + /** + * Wrapper over newDaemonFixedThreadExecutor. 
+ * @param threadName + * @param threadsNum + * @return + */ + public static ExecutorService newDaemonFixedThreadExecutor(String threadName,int threadsNum){ + ThreadFactory threadFactory = new ThreadFactoryBuilder() + .setDaemon(true) + .setNameFormat(threadName) + .build(); + return Executors.newFixedThreadPool(threadsNum,threadFactory); + } + + /** + * Wrapper over ScheduledThreadPoolExecutor + * @param corePoolSize + * @return + */ + public static ScheduledExecutorService newDaemonThreadScheduledExecutor(String threadName,int corePoolSize) { + ThreadFactory threadFactory = new ThreadFactoryBuilder() + .setDaemon(true) + .setNameFormat(threadName) + .build(); + ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(corePoolSize, threadFactory); + // By default, a cancelled task is not automatically removed from the work queue until its delay + // elapses. We have to enable it manually. + executor.setRemoveOnCancelPolicy(true); + return executor; + } + + + public static ThreadInfo getThreadInfo(Thread t) { + long tid = t.getId(); + return threadBean.getThreadInfo(tid, STACK_DEPTH); + } + + + /** + * Format the given ThreadInfo object as a String. + * @param indent a prefix for each line, used for nested indentation + */ + public static String formatThreadInfo(ThreadInfo threadInfo, String indent) { + StringBuilder sb = new StringBuilder(); + appendThreadInfo(sb, threadInfo, indent); + return sb.toString(); + } + + + /** + * Print all of the thread's information and stack traces. 
+ * + * @param sb + * @param info + * @param indent + */ + public static void appendThreadInfo(StringBuilder sb, + ThreadInfo info, + String indent) { + boolean contention = threadBean.isThreadContentionMonitoringEnabled(); + + if (info == null) { + sb.append(indent).append("Inactive (perhaps exited while monitoring was done)\n"); + return; + } + String taskName = getTaskName(info.getThreadId(), info.getThreadName()); + sb.append(indent).append("Thread ").append(taskName).append(":\n"); + + Thread.State state = info.getThreadState(); + sb.append(indent).append(" State: ").append(state).append("\n"); + sb.append(indent).append(" Blocked count: ").append(info.getBlockedCount()).append("\n"); + sb.append(indent).append(" Waited count: ").append(info.getWaitedCount()).append("\n"); + if (contention) { + sb.append(indent).append(" Blocked time: " + info.getBlockedTime()).append("\n"); + sb.append(indent).append(" Waited time: " + info.getWaitedTime()).append("\n"); + } + if (state == Thread.State.WAITING) { + sb.append(indent).append(" Waiting on ").append(info.getLockName()).append("\n"); + } else if (state == Thread.State.BLOCKED) { + sb.append(indent).append(" Blocked on ").append(info.getLockName()).append("\n"); + sb.append(indent).append(" Blocked by ").append( + getTaskName(info.getLockOwnerId(), info.getLockOwnerName())).append("\n"); + } + sb.append(indent).append(" Stack:").append("\n"); + for (StackTraceElement frame: info.getStackTrace()) { + sb.append(indent).append(" ").append(frame.toString()).append("\n"); + } + } + + private static String getTaskName(long id, String name) { + if (name == null) { + return Long.toString(id); + } + return id + " (" + name + ")"; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Bytes.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Bytes.java new file mode 100644 index 0000000000..4fb490bae6 --- /dev/null +++ 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Bytes.java @@ -0,0 +1,697 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; + +/** + * Utility class that handles Bytes + */ +public class Bytes { + + private static final Logger logger = LoggerFactory.getLogger(Bytes.class); + public static final String UTF8_ENCODING = "UTF-8"; + //An empty instance. + public static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; + + /** + * Size of int in bytes + */ + public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE; + + /** + * Size of long in bytes + */ + public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE; + + /** + * Size of short in bytes + */ + public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE; + + + + /** + * Put bytes at the specified byte array position. 
+ * @param tgtBytes the byte array + * @param tgtOffset position in the array + * @param srcBytes array to write out + * @param srcOffset source offset + * @param srcLength source length + * @return incremented offset + */ + public static int putBytes(byte[] tgtBytes, int tgtOffset, byte[] srcBytes, + int srcOffset, int srcLength) { + System.arraycopy(srcBytes, srcOffset, tgtBytes, tgtOffset, srcLength); + return tgtOffset + srcLength; + } + + /** + * Write a single byte out to the specified byte array position. + * @param bytes the byte array + * @param offset position in the array + * @param b byte to write out + * @return incremented offset + */ + public static int putByte(byte[] bytes, int offset, byte b) { + bytes[offset] = b; + return offset + 1; + } + + /** + * Returns a new byte array, copied from the passed ByteBuffer. + * @param bb A ByteBuffer + * @return the byte array + */ + public static byte[] toBytes(ByteBuffer bb) { + int length = bb.limit(); + byte [] result = new byte[length]; + System.arraycopy(bb.array(), bb.arrayOffset(), result, 0, length); + return result; + } + + /** + * @param b Presumed UTF-8 encoded byte array. + * @return String made from b + */ + public static String toString(final byte [] b) { + if (b == null) { + return null; + } + return toString(b, 0, b.length); + } + + /** + * Joins two byte arrays together using a separator. + * @param b1 The first byte array. + * @param sep The separator to use. + * @param b2 The second byte array. + */ + public static String toString(final byte [] b1, + String sep, + final byte [] b2) { + return toString(b1, 0, b1.length) + sep + toString(b2, 0, b2.length); + } + + /** + * This method will convert utf8 encoded bytes into a string. If + * an UnsupportedEncodingException occurs, this method will eat it + * and return null instead. + * + * @param b Presumed UTF-8 encoded byte array. 
+ * @param off offset into array + * @param len length of utf-8 sequence + * @return String made from b or null + */ + public static String toString(final byte [] b, int off, int len) { + if (b == null) { + return null; + } + if (len == 0) { + return ""; + } + return new String(b, off, len, StandardCharsets.UTF_8); + } + + + /** + * Converts a string to a UTF-8 byte array. + * @param s string + * @return the byte array + */ + public static byte[] toBytes(String s) { + return s.getBytes(StandardCharsets.UTF_8); + } + + /** + * Convert a boolean to a byte array. True becomes -1 + * and false becomes 0. + * + * @param b value + * @return b encoded in a byte array. + */ + public static byte [] toBytes(final boolean b) { + return new byte[] { b ? (byte) -1 : (byte) 0 }; + } + + /** + * Reverses {@link #toBytes(boolean)} + * @param b array + * @return True or false. + */ + public static boolean toBoolean(final byte [] b) { + if (b.length != 1) { + throw new IllegalArgumentException("Array has wrong size: " + b.length); + } + return b[0] != (byte) 0; + } + + /** + * Convert a long value to a byte array using big-endian. + * + * @param val value to convert + * @return the byte array + */ + public static byte[] toBytes(long val) { + byte [] b = new byte[8]; + for (int i = 7; i > 0; i--) { + b[i] = (byte) val; + val >>>= 8; + } + b[0] = (byte) val; + return b; + } + + /** + * Converts a byte array to a long value. Reverses + * {@link #toBytes(long)} + * @param bytes array + * @return the long value + */ + public static long toLong(byte[] bytes) { + return toLong(bytes, 0, SIZEOF_LONG); + } + + /** + * Converts a byte array to a long value. Assumes there will be + * {@link #SIZEOF_LONG} bytes available. + * + * @param bytes bytes + * @param offset offset + * @return the long value + */ + public static long toLong(byte[] bytes, int offset) { + return toLong(bytes, offset, SIZEOF_LONG); + } + + /** + * Converts a byte array to a long value. 
+ * + * @param bytes array of bytes + * @param offset offset into array + * @param length length of data (must be {@link #SIZEOF_LONG}) + * @return the long value + * @throws IllegalArgumentException if length is not {@link #SIZEOF_LONG} or + * if there's not enough room in the array at the offset indicated. + */ + public static long toLong(byte[] bytes, int offset, final int length) { + if (length != SIZEOF_LONG || offset + length > bytes.length) { + throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_LONG); + } + long l = 0; + for(int i = offset; i < offset + length; i++) { + l <<= 8; + l ^= bytes[i] & 0xFF; + } + return l; + } + + private static IllegalArgumentException + explainWrongLengthOrOffset(final byte[] bytes, + final int offset, + final int length, + final int expectedLength) { + String reason; + if (length != expectedLength) { + reason = "Wrong length: " + length + ", expected " + expectedLength; + } else { + reason = "offset (" + offset + ") + length (" + length + ") exceed the" + + " capacity of the array: " + bytes.length; + } + return new IllegalArgumentException(reason); + } + + /** + * Put a long value out to the specified byte array position. + * @param bytes the byte array + * @param offset position in the array + * @param val long to write out + * @return incremented offset + * @throws IllegalArgumentException if the byte array given doesn't have + * enough room at the offset specified. 
+ */ + public static int putLong(byte[] bytes, int offset, long val) { + if (bytes.length - offset < SIZEOF_LONG) { + throw new IllegalArgumentException("Not enough room to put a long at" + + " offset " + offset + " in a " + bytes.length + " byte array"); + } + for(int i = offset + 7; i > offset; i--) { + bytes[i] = (byte) val; + val >>>= 8; + } + bytes[offset] = (byte) val; + return offset + SIZEOF_LONG; + } + + /** + * Presumes float encoded as IEEE 754 floating-point "single format" + * @param bytes byte array + * @return Float made from passed byte array. + */ + public static float toFloat(byte [] bytes) { + return toFloat(bytes, 0); + } + + /** + * Presumes float encoded as IEEE 754 floating-point "single format" + * @param bytes array to convert + * @param offset offset into array + * @return Float made from passed byte array. + */ + public static float toFloat(byte [] bytes, int offset) { + return Float.intBitsToFloat(toInt(bytes, offset, SIZEOF_INT)); + } + + /** + * @param bytes byte array + * @param offset offset to write to + * @param f float value + * @return New offset in bytes + */ + public static int putFloat(byte [] bytes, int offset, float f) { + return putInt(bytes, offset, Float.floatToRawIntBits(f)); + } + + /** + * @param f float value + * @return the float represented as byte [] + */ + public static byte [] toBytes(final float f) { + // Encode it as int + return Bytes.toBytes(Float.floatToRawIntBits(f)); + } + + /** + * @param bytes byte array + * @return Return double made from passed bytes. + */ + public static double toDouble(final byte [] bytes) { + return toDouble(bytes, 0); + } + + /** + * @param bytes byte array + * @param offset offset where double is + * @return Return double made from passed bytes. 
+ */ + public static double toDouble(final byte [] bytes, final int offset) { + return Double.longBitsToDouble(toLong(bytes, offset, SIZEOF_LONG)); + } + + /** + * @param bytes byte array + * @param offset offset to write to + * @param d value + * @return New offset into array bytes + */ + public static int putDouble(byte [] bytes, int offset, double d) { + return putLong(bytes, offset, Double.doubleToLongBits(d)); + } + + /** + * Serialize a double as the IEEE 754 double format output. The resultant + * array will be 8 bytes long. + * + * @param d value + * @return the double represented as byte [] + */ + public static byte [] toBytes(final double d) { + // Encode it as a long + return Bytes.toBytes(Double.doubleToRawLongBits(d)); + } + + /** + * Convert an int value to a byte array + * @param val value + * @return the byte array + */ + public static byte[] toBytes(int val) { + byte [] b = new byte[4]; + for(int i = 3; i > 0; i--) { + b[i] = (byte) val; + val >>>= 8; + } + b[0] = (byte) val; + return b; + } + + /** + * Converts a byte array to an int value + * @param bytes byte array + * @return the int value + */ + public static int toInt(byte[] bytes) { + return toInt(bytes, 0, SIZEOF_INT); + } + + /** + * Converts a byte array to an int value + * @param bytes byte array + * @param offset offset into array + * @return the int value + */ + public static int toInt(byte[] bytes, int offset) { + return toInt(bytes, offset, SIZEOF_INT); + } + + /** + * Converts a byte array to an int value + * @param bytes byte array + * @param offset offset into array + * @param length length of int (has to be {@link #SIZEOF_INT}) + * @return the int value + * @throws IllegalArgumentException if length is not {@link #SIZEOF_INT} or + * if there's not enough room in the array at the offset indicated. 
+ */ + public static int toInt(byte[] bytes, int offset, final int length) { + if (length != SIZEOF_INT || offset + length > bytes.length) { + throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_INT); + } + int n = 0; + for(int i = offset; i < (offset + length); i++) { + n <<= 8; + n ^= bytes[i] & 0xFF; + } + return n; + } + + /** + * Put an int value out to the specified byte array position. + * @param bytes the byte array + * @param offset position in the array + * @param val int to write out + * @return incremented offset + * @throws IllegalArgumentException if the byte array given doesn't have + * enough room at the offset specified. + */ + public static int putInt(byte[] bytes, int offset, int val) { + if (bytes.length - offset < SIZEOF_INT) { + throw new IllegalArgumentException("Not enough room to put an int at" + + " offset " + offset + " in a " + bytes.length + " byte array"); + } + for(int i= offset + 3; i > offset; i--) { + bytes[i] = (byte) val; + val >>>= 8; + } + bytes[offset] = (byte) val; + return offset + SIZEOF_INT; + } + + /** + * Convert a short value to a byte array of {@link #SIZEOF_SHORT} bytes long. 
+ * @param val value + * @return the byte array + */ + public static byte[] toBytes(short val) { + byte[] b = new byte[SIZEOF_SHORT]; + b[1] = (byte) val; + val >>= 8; + b[0] = (byte) val; + return b; + } + + /** + * Converts a byte array to a short value + * @param bytes byte array + * @return the short value + */ + public static short toShort(byte[] bytes) { + return toShort(bytes, 0, SIZEOF_SHORT); + } + + /** + * Converts a byte array to a short value + * @param bytes byte array + * @param offset offset into array + * @return the short value + */ + public static short toShort(byte[] bytes, int offset) { + return toShort(bytes, offset, SIZEOF_SHORT); + } + + /** + * Converts a byte array to a short value + * @param bytes byte array + * @param offset offset into array + * @param length length, has to be {@link #SIZEOF_SHORT} + * @return the short value + * @throws IllegalArgumentException if length is not {@link #SIZEOF_SHORT} + * or if there's not enough room in the array at the offset indicated. + */ + public static short toShort(byte[] bytes, int offset, final int length) { + if (length != SIZEOF_SHORT || offset + length > bytes.length) { + throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_SHORT); + } + short n = 0; + n ^= bytes[offset] & 0xFF; + n <<= 8; + n ^= bytes[offset+1] & 0xFF; + return n; + } + + /** + * This method will get a sequence of bytes from pos -> limit, + * but will restore pos after. + * @param buf + * @return byte array + */ + public static byte[] getBytes(ByteBuffer buf) { + int savedPos = buf.position(); + byte [] newBytes = new byte[buf.remaining()]; + buf.get(newBytes); + buf.position(savedPos); + return newBytes; + } + + /** + * Put a short value out to the specified byte array position. 
+ * @param bytes the byte array + * @param offset position in the array + * @param val short to write out + * @return incremented offset + * @throws IllegalArgumentException if the byte array given doesn't have + * enough room at the offset specified. + */ + public static int putShort(byte[] bytes, int offset, short val) { + if (bytes.length - offset < SIZEOF_SHORT) { + throw new IllegalArgumentException("Not enough room to put a short at" + + " offset " + offset + " in a " + bytes.length + " byte array"); + } + bytes[offset+1] = (byte) val; + val >>= 8; + bytes[offset] = (byte) val; + return offset + SIZEOF_SHORT; + } + + /** + * Convert a BigDecimal value to a byte array + * + * @param val + * @return the byte array + */ + public static byte[] toBytes(BigDecimal val) { + byte[] valueBytes = val.unscaledValue().toByteArray(); + byte[] result = new byte[valueBytes.length + SIZEOF_INT]; + int offset = putInt(result, 0, val.scale()); + putBytes(result, offset, valueBytes, 0, valueBytes.length); + return result; + } + + + /** + * Converts a byte array to a BigDecimal + * + * @param bytes + * @return the char value + */ + public static BigDecimal toBigDecimal(byte[] bytes) { + return toBigDecimal(bytes, 0, bytes.length); + } + + /** + * Converts a byte array to a BigDecimal value + * + * @param bytes + * @param offset + * @param length + * @return the char value + */ + public static BigDecimal toBigDecimal(byte[] bytes, int offset, final int length) { + if (bytes == null || length < SIZEOF_INT + 1 || + (offset + length > bytes.length)) { + return null; + } + + int scale = toInt(bytes, offset); + byte[] tcBytes = new byte[length - SIZEOF_INT]; + System.arraycopy(bytes, offset + SIZEOF_INT, tcBytes, 0, length - SIZEOF_INT); + return new BigDecimal(new BigInteger(tcBytes), scale); + } + + /** + * Put a BigDecimal value out to the specified byte array position. 
+ * + * @param bytes the byte array + * @param offset position in the array + * @param val BigDecimal to write out + * @return incremented offset + */ + public static int putBigDecimal(byte[] bytes, int offset, BigDecimal val) { + if (bytes == null) { + return offset; + } + + byte[] valueBytes = val.unscaledValue().toByteArray(); + byte[] result = new byte[valueBytes.length + SIZEOF_INT]; + offset = putInt(result, offset, val.scale()); + return putBytes(result, offset, valueBytes, 0, valueBytes.length); + } + + /** + * @param a lower half + * @param b upper half + * @return New array that has a in lower half and b in upper half. + */ + public static byte [] add(final byte [] a, final byte [] b) { + return add(a, b, EMPTY_BYTE_ARRAY); + } + + /** + * @param a first third + * @param b second third + * @param c third third + * @return New array made from a, b and c + */ + public static byte [] add(final byte [] a, final byte [] b, final byte [] c) { + byte [] result = new byte[a.length + b.length + c.length]; + System.arraycopy(a, 0, result, 0, a.length); + System.arraycopy(b, 0, result, a.length, b.length); + System.arraycopy(c, 0, result, a.length + b.length, c.length); + return result; + } + + /** + * @param a array + * @param length amount of bytes to grab + * @return First length bytes from a + */ + public static byte [] head(final byte [] a, final int length) { + if (a.length < length) { + return null; + } + byte [] result = new byte[length]; + System.arraycopy(a, 0, result, 0, length); + return result; + } + + /** + * @param a array + * @param length amount of bytes to snarf + * @return Last length bytes from a + */ + public static byte [] tail(final byte [] a, final int length) { + if (a.length < length) { + return null; + } + byte [] result = new byte[length]; + System.arraycopy(a, a.length - length, result, 0, length); + return result; + } + + /** + * @param a array + * @param length new array size + * @return Value in a plus length prepended 0 bytes + */ + 
public static byte [] padHead(final byte [] a, final int length) { + byte[] padding = getPadding(length); + return add(padding,a); + } + + private static byte[] getPadding(int length) { + byte[] padding = new byte[length]; + for (int i = 0; i < length; i++) { + padding[i] = 0; + } + return padding; + } + + /** + * @param a array + * @param length new array size + * @return Value in a plus length appended 0 bytes + */ + public static byte [] padTail(final byte [] a, final int length) { + byte[] padding = getPadding(length); + return add(a,padding); + } + + + + /** + * @param bytes array to hash + * @param offset offset to start from + * @param length length to hash + * */ + public static int hashCode(byte[] bytes, int offset, int length) { + int hash = 1; + for (int i = offset; i < offset + length; i++) { + hash = (31 * hash) + (int) bytes[i]; + } + return hash; + } + + /** + * @param t operands + * @return Array of byte arrays made from passed array of Text + */ + public static byte [][] toByteArrays(final String [] t) { + byte [][] result = new byte[t.length][]; + for (int i = 0; i < t.length; i++) { + result[i] = Bytes.toBytes(t[i]); + } + return result; + } + + /** + * @param column operand + * @return A byte array of a byte array where first and only entry is + * column + */ + public static byte [][] toByteArrays(final String column) { + return toByteArrays(toBytes(column)); + } + + /** + * @param column operand + * @return A byte array of a byte array where first and only entry is + * column + */ + public static byte [][] toByteArrays(final byte [] column) { + byte [][] result = new byte[1][]; + result[0] = column; + return result; + } + + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java new file mode 100644 index 0000000000..470242359e --- /dev/null +++ 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java @@ -0,0 +1,292 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + + +import org.apache.commons.collections.BeanMap; +import org.apache.commons.lang.StringUtils; + +import java.util.*; + + +/** + * Provides utility methods and decorators for {@link Collection} instances. + *
+ * <p>
+ * Various utility methods might put the input objects into a Set/Map/Bag. In case + * the input objects override {@link Object#equals(Object)}, it is mandatory that + * the general contract of the {@link Object#hashCode()} method is maintained. + *
+ * <p>
+ * NOTE: From 4.0, method parameters will take {@link Iterable} objects when possible. + * + * @version $Id: CollectionUtils.java 1686855 2015-06-22 13:00:27Z tn $ + * @since 1.0 + */ +public class CollectionUtils { + + /** + * Returns a new {@link Collection} containing a minus a subset of + * b. Only the elements of b that satisfy the predicate + * condition, p are subtracted from a. + *
+ * <p>
+ * The cardinality of each element <i>e</i> in the returned {@link Collection}
+ * that satisfies the predicate condition will be the cardinality of <i>e</i> in <i>a</i>
+ * minus the cardinality of <i>e</i> in <i>b</i>, or zero, whichever is greater.
+ * </p>
+ * <p>
+ * The cardinality of each element <i>e</i> in the returned {@link Collection} that does not
+ * satisfy the predicate condition will be equal to the cardinality of <i>e</i> in <i>a</i>.
+ * </p>
+ * + * @param a the collection to subtract from, must not be null + * @param b the collection to subtract, must not be null + * @return a new collection with the results + * @see Collection#removeAll + */ + public static Collection subtract(Set a, Set b) { + return org.apache.commons.collections4.CollectionUtils.subtract(a, b); + } + + public static boolean isNotEmpty(Collection coll) { + return !isEmpty(coll); + } + + public static boolean isEmpty(Collection coll) { + return coll == null || coll.isEmpty(); + } + + /** + * String to map + * + * @param str string + * @param separator separator + * @return + */ + public static Map stringToMap(String str, String separator) { + return stringToMap(str, separator, ""); + } + + /** + * String to map + * + * @param str string + * @param separator separator + * @param keyPrefix prefix + * @return + */ + public static Map stringToMap(String str, String separator, String keyPrefix) { + if (null == str || "".equals(str)) { + return null; + } + if (null == separator || "".equals(separator)) { + return null; + } + String[] strings = str.split(separator); + int mapLength = strings.length; + if ((strings.length % 2) != 0) { + mapLength = mapLength + 1; + } + + Map map = new HashMap<>(mapLength); + for (int i = 0; i < strings.length; i++) { + String[] strArray = strings[i].split("="); + //strArray[0] KEY strArray[1] VALUE + if (StringUtils.isEmpty(keyPrefix)) { + map.put(strArray[0], strArray[1]); + } else { + map.put(keyPrefix + strArray[0], strArray[1]); + } + } + return map; + } + + + /** + * Helper class to easily access cardinality properties of two collections. + * + * @param the element type + */ + private static class CardinalityHelper { + + /** + * Contains the cardinality for each object in collection A. + */ + final Map cardinalityA; + + /** + * Contains the cardinality for each object in collection B. + */ + final Map cardinalityB; + + /** + * Create a new CardinalityHelper for two collections. 
+ * + * @param a the first collection + * @param b the second collection + */ + public CardinalityHelper(final Iterable a, final Iterable b) { + cardinalityA = CollectionUtils.getCardinalityMap(a); + cardinalityB = CollectionUtils.getCardinalityMap(b); + } + + /** + * Returns the maximum frequency of an object. + * + * @param obj the object + * @return the maximum frequency of the object + */ + public final int max(final Object obj) { + return Math.max(freqA(obj), freqB(obj)); + } + + /** + * Returns the minimum frequency of an object. + * + * @param obj the object + * @return the minimum frequency of the object + */ + public final int min(final Object obj) { + return Math.min(freqA(obj), freqB(obj)); + } + + /** + * Returns the frequency of this object in collection A. + * + * @param obj the object + * @return the frequency of the object in collection A + */ + public int freqA(final Object obj) { + return getFreq(obj, cardinalityA); + } + + /** + * Returns the frequency of this object in collection B. + * + * @param obj the object + * @return the frequency of the object in collection B + */ + public int freqB(final Object obj) { + return getFreq(obj, cardinalityB); + } + + private final int getFreq(final Object obj, final Map freqMap) { + final Integer count = freqMap.get(obj); + if (count != null) { + return count.intValue(); + } + return 0; + } + } + + /** + * returns {@code true} iff the given {@link Collection}s contain + * exactly the same elements with exactly the same cardinalities. + * + * @param a the first collection + * @param b the second collection + * @return Returns true iff the given Collections contain exactly the same elements with exactly the same cardinalities. + * That is, iff the cardinality of e in a is equal to the cardinality of e in b, for each element e in a or b. 
+ */ + public static boolean equalLists(Collection a, Collection b) { + if (a == null && b == null) { + return true; + } + + if ((a == null && b != null) || a != null && b == null) { + return false; + } + + return isEqualCollection(a, b); + } + + /** + * Returns {@code true} iff the given {@link Collection}s contain + * exactly the same elements with exactly the same cardinalities. + *
+ * <p>
+ * That is, iff the cardinality of e in a is + * equal to the cardinality of e in b, + * for each element e in a or b. + * + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @return true iff the collections contain the same elements with the same cardinalities. + */ + public static boolean isEqualCollection(final Collection a, final Collection b) { + if (a.size() != b.size()) { + return false; + } + final CardinalityHelper helper = new CardinalityHelper(a, b); + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { + return false; + } + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { + return false; + } + } + return true; + } + + /** + * Returns a {@link Map} mapping each unique element in the given + * {@link Collection} to an {@link Integer} representing the number + * of occurrences of that element in the {@link Collection}. + *
+ * <p>
+ * Only those elements present in the collection will appear as + * keys in the map. + * + * @param the type of object in the returned {@link Map}. This is a super type of . + * @param coll the collection to get the cardinality map for, must not be null + * @return the populated cardinality map + */ + public static Map getCardinalityMap(final Iterable coll) { + final Map count = new HashMap(); + for (final O obj : coll) { + final Integer c = count.get(obj); + if (c == null) { + count.put(obj, Integer.valueOf(1)); + } else { + count.put(obj, Integer.valueOf(c.intValue() + 1)); + } + } + return count; + } + + + /** + * Removes certain attributes of each object in the list + * @param originList + * @param exclusionSet + * @param + * @return + */ + public static List> getListByExclusion(List originList, Set exclusionSet) { + List> instanceList = new ArrayList<>(); + Map instanceMap; + for (T instance : originList) { + Map dataMap = new BeanMap(instance); + instanceMap = new LinkedHashMap<>(16,0.75f,true); + for (Object key : dataMap.keySet()) { + + if (exclusionSet.contains(key.toString())) { + continue; + } + instanceMap.put(key.toString(), dataMap.get(key)); + + } + instanceList.add(instanceMap); + } + return instanceList; + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java new file mode 100644 index 0000000000..28470ef57d --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ResUploadType; +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; + +/** + * common utils + */ +public class CommonUtils { + + private static final Logger logger = LoggerFactory.getLogger(CommonUtils.class); + + /** + * @return get the path of system environment variables + */ + public static String getSystemEnvPath() { + String envPath = PropertyUtils.getString(Constants.ESCHEDULER_ENV_PATH); + if (StringUtils.isEmpty(envPath)) { + envPath = System.getProperty("user.home") + File.separator + ".bash_profile"; + } + + return envPath; + } + + /** + * @return get queue implementation name + */ + public static String getQueueImplValue(){ + return PropertyUtils.getString(Constants.SCHEDULER_QUEUE_IMPL); + } + + /** + * + * @return is develop mode + */ + public static boolean isDevelopMode() { + return PropertyUtils.getBoolean(Constants.DEVELOPMENT_STATE); + } + + + + /** + * if upload resource is HDFS and kerberos startup is true , else false + * @return + */ + public static boolean getKerberosStartupState(){ + String resUploadStartupType = 
PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE); + ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); + Boolean kerberosStartupState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE); + return resUploadType == ResUploadType.HDFS && kerberosStartupState; + } + + /** + * load kerberos configuration + * @throws Exception + */ + public static void loadKerberosConf()throws Exception{ + if (CommonUtils.getKerberosStartupState()) { + System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)); + Configuration configuration = new Configuration(); + configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, Constants.KERBEROS); + UserGroupInformation.setConfiguration(configuration); + UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME), + PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH)); + } + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java new file mode 100644 index 0000000000..2bbb45b64c --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.*; + +public class ConnectionUtils { + + public static final Logger logger = LoggerFactory.getLogger(ConnectionUtils.class); + + private static ConnectionUtils instance; + + ConnectionUtils() { + } + + public static ConnectionUtils getInstance() { + if (null == instance) { + syncInit(); + } + return instance; + } + + private static synchronized void syncInit() { + if (instance == null) { + instance = new ConnectionUtils(); + } + } + + public void release(ResultSet rs, Statement stmt, Connection conn) { + try { + if (rs != null) { + rs.close(); + rs = null; + } + } catch (SQLException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException(e); + } finally { + try { + if (stmt != null) { + stmt.close(); + stmt = null; + } + } catch (SQLException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException(e); + } finally { + try { + if (conn != null) { + conn.close(); + conn = null; + } + } catch (SQLException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException(e); + } + } + } + } + + public static void releaseResource(ResultSet rs, PreparedStatement ps, Connection conn) { + ConnectionUtils.getInstance().release(rs,ps,conn); + if (null != rs) { + try { + rs.close(); + } catch (SQLException e) { + logger.error(e.getMessage(),e); + } + } + + if (null != ps) { + try { + ps.close(); + } catch (SQLException e) { + logger.error(e.getMessage(),e); + } + } + + if (null != conn) { + try { + 
conn.close(); + } catch (SQLException e) { + logger.error(e.getMessage(),e); + } + } + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java new file mode 100644 index 0000000000..c927a226e9 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java @@ -0,0 +1,356 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.util.Calendar; +import java.util.Date; + +/** + * date utils + */ +public class DateUtils { + + private static final Logger logger = LoggerFactory.getLogger(DateUtils.class); + + /** + * java.util.Date to java.time.LocalDateTime + * use default zone + * @param date + * @return + */ + private static LocalDateTime date2LocalDateTime(Date date) { + return LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault()); + } + + /** + * java.time.LocalDateTime to java.util.Date + * use default zone + * @param localDateTime + * @return + */ + private static Date localDateTime2Date(LocalDateTime localDateTime) { + Instant instant = localDateTime.atZone(ZoneId.systemDefault()).toInstant(); + return Date.from(instant); + } + + /** + * @return get the formatted date string for the current time + */ + public static String getCurrentTime() { + return getCurrentTime(Constants.YYYY_MM_DD_HH_MM_SS); + } + + /** + * @param format + * @return get the date string in the specified format of the current time + */ + public static String getCurrentTime(String format) { +// return new SimpleDateFormat(format).format(new Date()); + return LocalDateTime.now().format(DateTimeFormatter.ofPattern(format)); + } + + /** + * @param date + * @param format e.g. yyyy-MM-dd HH:mm:ss + * @return get the formatted date string + */ + public static String format(Date date, String format) { +// return new SimpleDateFormat(format).format(date); + return format(date2LocalDateTime(date), format); + } + + /** + * @param localDateTime + * @param format e.g. 
yyyy-MM-dd HH:mm:ss + * @return get the formatted date string + */ + public static String format(LocalDateTime localDateTime, String format) { + return localDateTime.format(DateTimeFormatter.ofPattern(format)); + } + + /** + * @param date + * @return convert time to yyyy-MM-dd HH:mm:ss format + */ + public static String dateToString(Date date) { + return format(date, Constants.YYYY_MM_DD_HH_MM_SS); + } + + + /** + * @param date + * @return convert string to date and time + */ + public static Date parse(String date, String format) { + try { + // return new SimpleDateFormat(format).parse(date); + LocalDateTime ldt = LocalDateTime.parse(date, DateTimeFormatter.ofPattern(format)); + return localDateTime2Date(ldt); + } catch (Exception e) { + logger.error("error while parse date:" + date, e); + } + return null; + } + + /** + * convert date str to yyyy-MM-dd HH:mm:ss format + * + * @param str + * @return + */ + public static Date stringToDate(String str) { + return parse(str, Constants.YYYY_MM_DD_HH_MM_SS); + } + + /** + * get seconds between two dates + * + * @param d1 + * @param d2 + * @return + */ + public static long differSec(Date d1, Date d2) { + return (long) Math.ceil(differMs(d1, d2) / 1000.0); + } + + /** + * get ms between two dates + * + * @param d1 + * @param d2 + * @return + */ + public static long differMs(Date d1, Date d2) { + return Math.abs(d1.getTime() - d2.getTime()); + } + + + /** + * get hours between two dates + * + * @param d1 + * @param d2 + * @return + */ + public static long diffHours(Date d1, Date d2) { + return (long) Math.ceil(diffMin(d1, d2) / 60.0); + } + + /** + * get minutes between two dates + * + * @param d1 + * @param d2 + * @return + */ + public static long diffMin(Date d1, Date d2) { + return (long) Math.ceil(differSec(d1, d2) / 60.0); + } + + + /** + * get the date of the specified date in the days before and after + * + * @param date + * @param day + * @return + */ + public static Date getSomeDay(Date date, int day) { + Calendar 
calendar = Calendar.getInstance(); + calendar.setTime(date); + calendar.add(Calendar.DATE, day); + return calendar.getTime(); + } + + /** + * compare two dates + * + * @param future + * @param old + * @return + */ + public static boolean compare(Date future, Date old) { + return future.getTime() > old.getTime(); + } + + /** + * convert schedule string to date + * + * @param schedule + * @return + */ + public static Date getScheduleDate(String schedule) { + return stringToDate(schedule); + } + + /** + * format time to readable + * + * @param ms + * @return + */ + public static String format2Readable(long ms) { + + long days = ms / (1000 * 60 * 60 * 24); + long hours = (ms % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60); + long minutes = (ms % (1000 * 60 * 60)) / (1000 * 60); + long seconds = (ms % (1000 * 60)) / 1000; + + return String.format("%02d %02d:%02d:%02d", days, hours, minutes, seconds); + + } + + /** + * get monday + *

+ * note: Set the first day of the week to Monday, the default is Sunday + */ + public static Date getMonday(Date date) { + Calendar cal = Calendar.getInstance(); + + cal.setTime(date); + + cal.setFirstDayOfWeek(Calendar.MONDAY); + cal.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); + + return cal.getTime(); + } + + /** + * get sunday + *

+ * note: Set the first day of the week to Monday, the default is Sunday + */ + public static Date getSunday(Date date) { + Calendar cal = Calendar.getInstance(); + cal.setTime(date); + + cal.setFirstDayOfWeek(Calendar.MONDAY); + cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY); + + return cal.getTime(); + } + + /** + * get first day of month + */ + public static Date getFirstDayOfMonth(Date date) { + Calendar cal = Calendar.getInstance(); + + cal.setTime(date); + cal.set(Calendar.DAY_OF_MONTH, 1); + + return cal.getTime(); + } + + /** + * get the date the given number of hours earlier, with minute and second set to 0 + */ + public static Date getSomeHourOfDay(Date date, int hours) { + Calendar cal = Calendar.getInstance(); + + cal.setTime(date); + cal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY) - hours); + cal.set(Calendar.MINUTE, 0); + cal.set(Calendar.SECOND, 0); + + return cal.getTime(); + } + + /** + * get last day of month + */ + public static Date getLastDayOfMonth(Date date) { + Calendar cal = Calendar.getInstance(); + + cal.setTime(date); + + cal.add(Calendar.MONTH, 1); + cal.set(Calendar.DAY_OF_MONTH, 1); + cal.add(Calendar.DAY_OF_MONTH, -1); + + return cal.getTime(); + } + + /** + * return YYYY-MM-DD 00:00:00 + * + * @param inputDay + * @return + */ + public static Date getStartOfDay(Date inputDay) { + Calendar cal = Calendar.getInstance(); + cal.setTime(inputDay); + cal.set(Calendar.HOUR_OF_DAY, 0); + cal.set(Calendar.MINUTE, 0); + cal.set(Calendar.SECOND, 0); + return cal.getTime(); + } + + /** + * return YYYY-MM-DD 23:59:59 + * + * @param inputDay + * @return + */ + public static Date getEndOfDay(Date inputDay) { + Calendar cal = Calendar.getInstance(); + cal.setTime(inputDay); + cal.set(Calendar.HOUR_OF_DAY, 23); + cal.set(Calendar.MINUTE, 59); + cal.set(Calendar.SECOND, 59); + return cal.getTime(); + } + + /** + * return YYYY-MM-DD HH:00:00 (start of the given hour) + * + * @param inputDay + * @return + */ + public static Date getStartOfHour(Date inputDay) { + Calendar cal = Calendar.getInstance(); + 
cal.setTime(inputDay); + cal.set(Calendar.MINUTE, 0); + cal.set(Calendar.SECOND, 0); + return cal.getTime(); + } + + /** + * return YYYY-MM-DD HH:59:59 (end of the given hour) + * + * @param inputDay + * @return + */ + public static Date getEndOfHour(Date inputDay) { + Calendar cal = Calendar.getInstance(); + cal.setTime(inputDay); + cal.set(Calendar.MINUTE, 59); + cal.set(Calendar.SECOND, 59); + return cal.getTime(); + } + + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DependentUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DependentUtils.java new file mode 100644 index 0000000000..ee3c6260c6 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DependentUtils.java @@ -0,0 +1,144 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements.  See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License.  You may obtain a copy of the License at + * + *    http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.DependentRelation; +import org.apache.dolphinscheduler.common.model.DateInterval; +import org.apache.dolphinscheduler.common.utils.dependent.DependentDateUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +public class DependentUtils { + + private static final Logger logger = LoggerFactory.getLogger(DependentUtils.class); + + public static DependResult getDependResultForRelation(DependentRelation relation, + List dependResultList){ + + DependResult dependResult = DependResult.SUCCESS; + + switch (relation){ + case AND: + if(dependResultList.contains(DependResult.FAILED)){ + dependResult = DependResult.FAILED; + }else if(dependResultList.contains(DependResult.WAITING)){ + dependResult = DependResult.WAITING; + }else{ + dependResult = DependResult.SUCCESS; + } + break; + case OR: + if(dependResultList.contains(DependResult.SUCCESS)){ + dependResult = DependResult.SUCCESS; + }else if(dependResultList.contains(DependResult.WAITING)){ + dependResult = DependResult.WAITING; + }else{ + dependResult = DependResult.FAILED; + } + break; + default: + break; + } + return dependResult; + } + + + /** + * get date interval list by business date and date value. 
+ * @param businessDate + * @param dateValue + * @return + */ + public static List getDateIntervalList(Date businessDate, String dateValue){ + List result = new ArrayList<>(); + switch (dateValue){ + case "last1Hour": + result = DependentDateUtils.getLastHoursInterval(businessDate, 1); + break; + case "last2Hours": + result = DependentDateUtils.getLastHoursInterval(businessDate, 2); + break; + case "last3Hours": + result = DependentDateUtils.getLastHoursInterval(businessDate, 3); + break; + case "today": + result = DependentDateUtils.getTodayInterval(businessDate); + break; + case "last1Days": + result = DependentDateUtils.getLastDayInterval(businessDate, 1); + break; + case "last2Days": + result = DependentDateUtils.getLastDayInterval(businessDate, 2); + break; + case "last3Days": + result = DependentDateUtils.getLastDayInterval(businessDate, 3); + break; + case "last7Days": + result = DependentDateUtils.getLastDayInterval(businessDate, 7); + break; + case "thisWeek": + result = DependentDateUtils.getThisWeekInterval(businessDate); + break; + case "lastWeek": + result = DependentDateUtils.getLastWeekInterval(businessDate); + break; + case "lastMonday": + result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 1); + break; + case "lastTuesday": + result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 2); + break; + case "lastWednesday": + result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 3); + break; + case "lastThursday": + result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 4); + break; + case "lastFriday": + result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 5); + break; + case "lastSaturday": + result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 6); + break; + case "lastSunday": + result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 7); + break; + case "thisMonth": + result = DependentDateUtils.getThisMonthInterval(businessDate); + break; + case 
"lastMonth": + result = DependentDateUtils.getLastMonthInterval(businessDate); + break; + case "lastMonthBegin": + result = DependentDateUtils.getLastMonthBeginInterval(businessDate, true); + break; + case "lastMonthEnd": + result = DependentDateUtils.getLastMonthBeginInterval(businessDate, false); + break; + default: + break; + } + return result; + } + + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EncryptionUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EncryptionUtils.java new file mode 100644 index 0000000000..589827a975 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EncryptionUtils.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.commons.lang3.StringUtils; + +/** + * encryption utils + */ +public class EncryptionUtils { + + + /** + * + * @param rawStr + * @return md5(rawStr) + */ + public static String getMd5(String rawStr) { + return DigestUtils.md5Hex(null == rawStr ? 
StringUtils.EMPTY : rawStr); + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EnumFieldUtil.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EnumFieldUtil.java new file mode 100644 index 0000000000..7c4a54114c --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EnumFieldUtil.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +/** + * enum field util + */ +public class EnumFieldUtil { + /** + * Generate a string for the enums field + * + * @param field + * @param enumClass + * @return + */ + public static String genFieldStr(String field, Class enumClass) { + //TODO... 
+ // delete this class when mybatisplus is ok + return ""; +// return "#{" + field + ",javaType=" + enumClass.getName() + ",typeHandler=" + EnumOrdinalTypeHandler.class.getName() + "}"; + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java new file mode 100644 index 0000000000..1b535dcca8 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java @@ -0,0 +1,427 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.commons.io.Charsets; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.nio.charset.Charset; +import java.nio.charset.UnsupportedCharsetException; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * file utils + */ +public class FileUtils { + public static final Logger logger = LoggerFactory.getLogger(FileUtils.class); + + /** + * get file suffix + * + * @param filename + * @return file suffix + */ + public static String suffix(String filename) { + + String fileSuffix = ""; + if (StringUtils.isNotEmpty(filename)) { + int lastIndex = filename.lastIndexOf("."); + if (lastIndex > 0) { + fileSuffix = filename.substring(lastIndex + 1); + } + } + return fileSuffix; + } + + /** + * get download file absolute path and name + * + * @param filename + * @return download file name + */ + public static String getDownloadFilename(String filename) { + return String.format("%s/%s/%s", PropertyUtils.getString(DATA_DOWNLOAD_BASEDIR_PATH), DateUtils.getCurrentTime(YYYYMMDDHHMMSS), filename); + } + + /** + * get upload file absolute path and name + * + * @param tenantCode tenant code + * @param filename file name + * @return local file path + */ + public static String getUploadFilename(String tenantCode, String filename) { + return String.format("%s/%s/resources/%s", PropertyUtils.getString(DATA_BASEDIR_PATH), tenantCode, filename); + } + + /** + * directory of process execution + * @param projectId + * @param processDefineId + * @param processInstanceId + * @param taskInstanceId + * @return directory of process execution + */ + public static String getProcessExecDir(int projectId, int processDefineId, int processInstanceId, int taskInstanceId) { + + return String.format("%s/process/%s/%s/%s/%s", PropertyUtils.getString(PROCESS_EXEC_BASEPATH), 
Integer.toString(projectId), + Integer.toString(processDefineId), Integer.toString(processInstanceId),Integer.toString(taskInstanceId)); + } + + /** + * directory of process instances + * @param projectId + * @param processDefineId + * @param processInstanceId + * @return directory of process instances + */ + public static String getProcessExecDir(int projectId, int processDefineId, int processInstanceId) { + return String.format("%s/process/%s/%s/%s", PropertyUtils.getString(PROCESS_EXEC_BASEPATH), Integer.toString(projectId), + Integer.toString(processDefineId), Integer.toString(processInstanceId)); + } + + /** + * @return get suffixes for resource files that support online viewing + */ + public static String getResourceViewSuffixs() { + return PropertyUtils.getString(RESOURCE_VIEW_SUFFIXS); + } + + /** + * create directory and user + * @param execLocalPath + * @param userName + * @param logger + * @throws IOException + */ + public static void createWorkDirAndUserIfAbsent(String execLocalPath, String userName, Logger logger) throws IOException{ + //if work dir exists, first delete + File execLocalPathFile = new File(execLocalPath); + + if (execLocalPathFile.exists()){ + org.apache.commons.io.FileUtils.forceDelete(execLocalPathFile); + } + + //create work dir + org.apache.commons.io.FileUtils.forceMkdir(execLocalPathFile); + + + //if not exists this user,then create + if (!OSUtils.getUserList().contains(userName)){ + String userGroup = OSUtils.getGroup(); + if (org.apache.commons.lang3.StringUtils.isNotEmpty(userGroup)){ + logger.info("create os user : {}",userName); + String cmd = String.format("sudo useradd -g %s %s",userGroup,userName); + + logger.info("execute cmd : {}",cmd); + OSUtils.exeCmd(cmd); + } + } + + } + + + /** + * write content to file ,if parent path not exists, it will do one's utmost to mkdir + * + * @param content content + * @param filePath target file path + * @return + */ + public static boolean writeContent2File(String content, String 
filePath) { + boolean flag = true; + BufferedReader bufferedReader = null; + BufferedWriter bufferedWriter = null; + try { + File distFile = new File(filePath); + if (!distFile.getParentFile().exists()) { + distFile.getParentFile().mkdirs(); + } + bufferedReader = new BufferedReader(new StringReader(content)); + bufferedWriter = new BufferedWriter(new FileWriter(distFile)); + char buf[] = new char[1024]; + int len; + while ((len = bufferedReader.read(buf)) != -1) { + bufferedWriter.write(buf, 0, len); + } + bufferedWriter.flush(); + bufferedReader.close(); + bufferedWriter.close(); + } catch (IOException e) { + FileUtils.logger.error(e.getMessage(), e); + flag = false; + return flag; + } finally { + IOUtils.closeQuietly(bufferedWriter); + IOUtils.closeQuietly(bufferedReader); + } + return flag; + } + + /** + * Writes a String to a file creating the file if it does not exist. + * + * NOTE: As from v1.3, the parent directories of the file will be created + * if they do not exist. + * + * @param file the file to write + * @param data the content to write to the file + * @param encoding the encoding to use, {@code null} means platform default + * @throws IOException in case of an I/O error + * @throws java.io.UnsupportedEncodingException if the encoding is not supported by the VM + * @since 2.4 + */ + public static void writeStringToFile(File file, String data, Charset encoding) throws IOException { + writeStringToFile(file, data, encoding, false); + } + + /** + * Writes a String to a file creating the file if it does not exist. + * + * NOTE: As from v1.3, the parent directories of the file will be created + * if they do not exist. 
+ * + * @param file the file to write + * @param data the content to write to the file + * @param encoding the encoding to use, {@code null} means platform default + * @throws IOException in case of an I/O error + * @throws java.io.UnsupportedEncodingException if the encoding is not supported by the VM + */ + public static void writeStringToFile(File file, String data, String encoding) throws IOException { + writeStringToFile(file, data, encoding, false); + } + + /** + * Writes a String to a file creating the file if it does not exist. + * + * @param file the file to write + * @param data the content to write to the file + * @param encoding the encoding to use, {@code null} means platform default + * @param append if {@code true}, then the String will be added to the + * end of the file rather than overwriting + * @throws IOException in case of an I/O error + * @since 2.3 + */ + public static void writeStringToFile(File file, String data, Charset encoding, boolean append) throws IOException { + OutputStream out = null; + try { + out = openOutputStream(file, append); + IOUtils.write(data, out, encoding); + out.close(); // don't swallow close Exception if copy completes normally + } finally { + IOUtils.closeQuietly(out); + } + } + + /** + * Writes a String to a file creating the file if it does not exist. 
+ * + * @param file the file to write + * @param data the content to write to the file + * @param encoding the encoding to use, {@code null} means platform default + * @param append if {@code true}, then the String will be added to the + * end of the file rather than overwriting + * @throws IOException in case of an I/O error + * @throws UnsupportedCharsetException + * thrown instead of {@link UnsupportedEncodingException} in version 2.2 if the encoding is not + * supported by the VM + * @since 2.1 + */ + public static void writeStringToFile(File file, String data, String encoding, boolean append) throws IOException { + writeStringToFile(file, data, Charsets.toCharset(encoding), append); + } + + /** + * Writes a String to a file creating the file if it does not exist using the default encoding for the VM. + * + * @param file the file to write + * @param data the content to write to the file + * @throws IOException in case of an I/O error + */ + public static void writeStringToFile(File file, String data) throws IOException { + writeStringToFile(file, data, Charset.defaultCharset(), false); + } + + /** + * Writes a String to a file creating the file if it does not exist using the default encoding for the VM. + * + * @param file the file to write + * @param data the content to write to the file + * @param append if {@code true}, then the String will be added to the + * end of the file rather than overwriting + * @throws IOException in case of an I/O error + * @since 2.1 + */ + public static void writeStringToFile(File file, String data, boolean append) throws IOException { + writeStringToFile(file, data, Charset.defaultCharset(), append); + } + + /** + * Opens a {@link FileOutputStream} for the specified file, checking and + * creating the parent directory if it does not exist. + *

+ * At the end of the method either the stream will be successfully opened, + * or an exception will have been thrown. + *

+ * The parent directory will be created if it does not exist. + * The file will be created if it does not exist. + * An exception is thrown if the file object exists but is a directory. + * An exception is thrown if the file exists but cannot be written to. + * An exception is thrown if the parent directory cannot be created. + * + * @param file the file to open for output, must not be {@code null} + * @return a new {@link FileOutputStream} for the specified file + * @throws IOException if the file object is a directory + * @throws IOException if the file cannot be written to + * @throws IOException if a parent directory needs creating but that fails + * @since 1.3 + */ + public static FileOutputStream openOutputStream(File file) throws IOException { + return openOutputStream(file, false); + } + + /** + * Opens a {@link FileOutputStream} for the specified file, checking and + * creating the parent directory if it does not exist. + *

+ * At the end of the method either the stream will be successfully opened, + * or an exception will have been thrown. + *

+ * The parent directory will be created if it does not exist. + * The file will be created if it does not exist. + * An exception is thrown if the file object exists but is a directory. + * An exception is thrown if the file exists but cannot be written to. + * An exception is thrown if the parent directory cannot be created. + * + * @param file the file to open for output, must not be {@code null} + * @param append if {@code true}, then bytes will be added to the + * end of the file rather than overwriting + * @return a new {@link FileOutputStream} for the specified file + * @throws IOException if the file object is a directory + * @throws IOException if the file cannot be written to + * @throws IOException if a parent directory needs creating but that fails + * @since 2.1 + */ + public static FileOutputStream openOutputStream(File file, boolean append) throws IOException { + if (file.exists()) { + if (file.isDirectory()) { + throw new IOException("File '" + file + "' exists but is a directory"); + } + if (file.canWrite() == false) { + throw new IOException("File '" + file + "' cannot be written to"); + } + } else { + File parent = file.getParentFile(); + if (parent != null) { + if (!parent.mkdirs() && !parent.isDirectory()) { + throw new IOException("Directory '" + parent + "' could not be created"); + } + } + } + return new FileOutputStream(file, append); + } + + + /** + * deletes a directory recursively + * @param dir + */ + + public static void deleteDir(String dir) throws IOException { + org.apache.commons.io.FileUtils.deleteDirectory(new File(dir)); + } + + /** + * Deletes a file. If file is a directory, delete it and all sub-directories. + *

+ * The difference between File.delete() and this method are: + *

    + *
  • A directory to be deleted does not have to be empty.
  • + *
  • You get exceptions when a file or directory cannot be deleted. + * (java.io.File methods returns a boolean)
  • + *
+ * + * @param filename + * @throws IOException in case deletion is unsuccessful + */ + public static void deleteFile(String filename) throws IOException { + org.apache.commons.io.FileUtils.forceDelete(new File(filename)); + } + + /** + * Gets all the parent subdirectories of the parentDir directory + * @param parentDir + * @return + */ + public static File[] getAllDir(String parentDir){ + if(parentDir == null || "".equals(parentDir)) { + throw new RuntimeException("parentDir can not be empty"); + } + + File file = new File(parentDir); + if(!file.exists() || !file.isDirectory()) { + throw new RuntimeException("parentDir not exist, or is not a directory:"+parentDir); + } + + File[] schemaDirs = file.listFiles(new FileFilter() { + + @Override + public boolean accept(File pathname) { + if (pathname.isDirectory()) { + return true; + } + else { + return false; + } + } + }); + + return schemaDirs; + } + + /** + * Get Content + * @param inputStream + * @return + * @throws IOException + */ + public static String readFile2Str(InputStream inputStream) throws IOException{ + String all_content=null; + try { + all_content = new String(); + InputStream ins = inputStream; + ByteArrayOutputStream outputstream = new ByteArrayOutputStream(); + byte[] str_b = new byte[1024]; + int i = -1; + while ((i=ins.read(str_b)) > 0) { + outputstream.write(str_b,0,i); + } + all_content = outputstream.toString(); + return all_content; + } catch (Exception e) { + logger.error(e.getMessage(),e); + throw new RuntimeException(e); + } + } + + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java new file mode 100644 index 0000000000..2c28263ef4 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java @@ -0,0 +1,581 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or 
more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.ResUploadType; +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONException; +import com.alibaba.fastjson.JSONObject; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.*; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.yarn.client.cli.RMAdminCLI; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.security.PrivilegedExceptionAction; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * hadoop utils + * single instance + */ +public class HadoopUtils implements Closeable { + + private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class); + + private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER); + private static volatile HadoopUtils instance = new HadoopUtils(); 
+ private static volatile Configuration configuration; + private static FileSystem fs; + + + private HadoopUtils(){ + if(StringUtils.isEmpty(hdfsUser)){ + hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER); + } + init(); + initHdfsPath(); + } + + public static HadoopUtils getInstance(){ + // if kerberos startup , renew HadoopUtils + if (CommonUtils.getKerberosStartupState()){ + return new HadoopUtils(); + } + return instance; + } + + /** + * init escheduler root path in hdfs + */ + private void initHdfsPath(){ + String hdfsPath = PropertyUtils.getString(Constants.DATA_STORE_2_HDFS_BASEPATH); + Path path = new Path(hdfsPath); + + try { + if (!fs.exists(path)) { + fs.mkdirs(path); + } + } catch (Exception e) { + logger.error(e.getMessage(),e); + } + } + + + /** + * init hadoop configuration + */ + private void init() { + if (configuration == null) { + synchronized (HadoopUtils.class) { + if (configuration == null) { + try { + configuration = new Configuration(); + + String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE); + ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); + + if (resUploadType == ResUploadType.HDFS){ + if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE)){ + System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, + PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)); + configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION,"kerberos"); + UserGroupInformation.setConfiguration(configuration); + UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME), + PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH)); + } + + String defaultFS = configuration.get(Constants.FS_DEFAULTFS); + //first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file + // the default is the local file system + if(defaultFS.startsWith("file")){ + String defaultFSProp = 
PropertyUtils.getString(Constants.FS_DEFAULTFS); + if(StringUtils.isNotBlank(defaultFSProp)){ + Map fsRelatedProps = PropertyUtils.getPrefixedProperties("fs."); + configuration.set(Constants.FS_DEFAULTFS,defaultFSProp); + fsRelatedProps.entrySet().stream().forEach(entry -> configuration.set(entry.getKey(), entry.getValue())); + }else{ + logger.error("property:{} can not to be empty, please set!"); + throw new RuntimeException("property:{} can not to be empty, please set!"); + } + }else{ + logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS); + } + + if (fs == null) { + if(StringUtils.isNotEmpty(hdfsUser)){ + //UserGroupInformation ugi = UserGroupInformation.createProxyUser(hdfsUser,UserGroupInformation.getLoginUser()); + UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser); + ugi.doAs(new PrivilegedExceptionAction() { + @Override + public Boolean run() throws Exception { + fs = FileSystem.get(configuration); + return true; + } + }); + }else{ + logger.warn("hdfs.root.user is not set value!"); + fs = FileSystem.get(configuration); + } + } + }else if (resUploadType == ResUploadType.S3){ + configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS)); + configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT)); + configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY)); + configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY)); + fs = FileSystem.get(configuration); + } + + + String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS); + String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS); + if (!StringUtils.isEmpty(rmHaIds)) { + appAddress = getAppAddress(appAddress, rmHaIds); + logger.info("appAddress : {}", appAddress); + } + 
configuration.set(Constants.YARN_APPLICATION_STATUS_ADDRESS, appAddress); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + } + } + } + } + + /** + * @return Configuration + */ + public Configuration getConfiguration() { + return configuration; + } + + /** + * get application url + * + * @param applicationId + * @return + */ + public String getApplicationUrl(String applicationId) { + return String.format(configuration.get(Constants.YARN_APPLICATION_STATUS_ADDRESS), applicationId); + } + + /** + * cat file on hdfs + * + * @param hdfsFilePath hdfs file path + * @return byte[] + */ + public byte[] catFile(String hdfsFilePath) throws IOException { + + if(StringUtils.isBlank(hdfsFilePath)){ + logger.error("hdfs file path:{} is blank",hdfsFilePath); + return null; + } + + FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath)); + return IOUtils.toByteArray(fsDataInputStream); + } + + + + /** + * cat file on hdfs + * + * @param hdfsFilePath hdfs file path + * @param skipLineNums skip line numbers + * @param limit read how many lines + * @return + */ + public List catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException { + + if (StringUtils.isBlank(hdfsFilePath)){ + logger.error("hdfs file path:{} is blank",hdfsFilePath); + return null; + } + + FSDataInputStream in = fs.open(new Path(hdfsFilePath)); + BufferedReader br = new BufferedReader(new InputStreamReader(in)); + Stream stream = br.lines().skip(skipLineNums).limit(limit); + return stream.collect(Collectors.toList()); + } + + /** + * make the given file and all non-existent parents into + * directories. Has the semantics of Unix 'mkdir -p'. + * Existence of the directory hierarchy is not an error. 
+ * + * @param hdfsPath path to create + */ + public boolean mkdir(String hdfsPath) throws IOException { + return fs.mkdirs(new Path(hdfsPath)); + } + + /** + * copy files between FileSystems + * + * @param srcPath source hdfs path + * @param dstPath destination hdfs path + * @param deleteSource whether to delete the src + * @param overwrite whether to overwrite an existing file + * @return 是否成功 + */ + public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException { + return FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf()); + } + + /** + * the src file is on the local disk. Add it to FS at + * the given dst name. + + * @param srcFile local file + * @param dstHdfsPath destination hdfs path + * @param deleteSource whether to delete the src + * @param overwrite whether to overwrite an existing file + */ + public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException { + Path srcPath = new Path(srcFile); + Path dstPath= new Path(dstHdfsPath); + + fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath); + + return true; + } + + /** + * copy hdfs file to local + * + * @param srcHdfsFilePath source hdfs file path + * @param dstFile destination file + * @param deleteSource delete source + * @param overwrite overwrite + * @return + * @throws IOException + */ + public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean deleteSource, boolean overwrite) throws IOException { + Path srcPath = new Path(srcHdfsFilePath); + File dstPath = new File(dstFile); + + if (dstPath.exists()) { + if (dstPath.isFile()) { + if (overwrite) { + dstPath.delete(); + } + } else { + logger.error("destination file must be a file"); + } + } + + if(!dstPath.getParentFile().exists()){ + dstPath.getParentFile().mkdirs(); + } + + return FileUtil.copy(fs, srcPath, dstPath, deleteSource, fs.getConf()); + } + + /** + * 
+ * delete a file + * + * @param hdfsFilePath the path to delete. + * @param recursive if path is a directory and set to + * true, the directory is deleted else throws an exception. In + * case of a file the recursive can be set to either true or false. + * @return true if delete is successful else false. + * @throws IOException + */ + public boolean delete(String hdfsFilePath, boolean recursive) throws IOException { + return fs.delete(new Path(hdfsFilePath), recursive); + } + + /** + * check if exists + * + * @param hdfsFilePath source file path + * @return + */ + public boolean exists(String hdfsFilePath) throws IOException { + return fs.exists(new Path(hdfsFilePath)); + } + + /** + * Gets a list of files in the directory + * + * @param filePath + * @return {@link FileStatus} + */ + public FileStatus[] listFileStatus(String filePath)throws Exception{ + Path path = new Path(filePath); + try { + return fs.listStatus(new Path(filePath)); + } catch (IOException e) { + logger.error("Get file list exception", e); + throw new Exception("Get file list exception", e); + } + } + + /** + * Renames Path src to Path dst. Can take place on local fs + * or remote DFS. 
+ * @param src path to be renamed + * @param dst new path after rename + * @throws IOException on failure + * @return true if rename is successful + */ + public boolean rename(String src, String dst) throws IOException { + return fs.rename(new Path(src), new Path(dst)); + } + + + /** + * get the state of an application + * + * @param applicationId + * @return the return may be null or there may be other parse exceptions + * @throws JSONException + * @throws IOException + */ + public ExecutionStatus getApplicationStatus(String applicationId) throws JSONException { + if (StringUtils.isEmpty(applicationId)) { + return null; + } + + String applicationUrl = getApplicationUrl(applicationId); + + String responseContent = HttpUtils.get(applicationUrl); + + JSONObject jsonObject = JSONObject.parseObject(responseContent); + String result = jsonObject.getJSONObject("app").getString("finalStatus"); + + switch (result) { + case Constants.ACCEPTED: + return ExecutionStatus.SUBMITTED_SUCCESS; + case Constants.SUCCEEDED: + return ExecutionStatus.SUCCESS; + case Constants.NEW: + case Constants.NEW_SAVING: + case Constants.SUBMITTED: + case Constants.FAILED: + return ExecutionStatus.FAILURE; + case Constants.KILLED: + return ExecutionStatus.KILL; + + case Constants.RUNNING: + default: + return ExecutionStatus.RUNNING_EXEUTION; + } + } + + /** + * + * @return data hdfs path + */ + public static String getHdfsDataBasePath() { + String basePath = PropertyUtils.getString(Constants.DATA_STORE_2_HDFS_BASEPATH); + if ("/".equals(basePath)) { + // if basepath is configured to /, the generated url may be //default/resources (with extra leading /) + return ""; + } else { + return basePath; + } + } + + /** + * hdfs resource dir + * + * @param tenantCode tenant code + * @return hdfs resource dir + */ + public static String getHdfsResDir(String tenantCode) { + return String.format("%s/resources", getHdfsTenantDir(tenantCode)); + } + + /** + * hdfs user dir + * + * @param tenantCode tenant code + 
* @return hdfs resource dir + */ + public static String getHdfsUserDir(String tenantCode,int userId) { + return String.format("%s/home/%d", getHdfsTenantDir(tenantCode),userId); + } + + /** + * hdfs udf dir + * + * @param tenantCode tenant code + * @return get udf dir on hdfs + */ + public static String getHdfsUdfDir(String tenantCode) { + return String.format("%s/udfs", getHdfsTenantDir(tenantCode)); + } + + /** + * get absolute path and name for file on hdfs + * + * @param tenantCode tenant code + * @param filename file name + * @return get absolute path and name for file on hdfs + */ + public static String getHdfsFilename(String tenantCode, String filename) { + return String.format("%s/%s", getHdfsResDir(tenantCode), filename); + } + + /** + * get absolute path and name for udf file on hdfs + * + * @param tenantCode tenant code + * @param filename file name + * @return get absolute path and name for udf file on hdfs + */ + public static String getHdfsUdfFilename(String tenantCode, String filename) { + return String.format("%s/%s", getHdfsUdfDir(tenantCode), filename); + } + + /** + * @return file directory of tenants on hdfs + */ + public static String getHdfsTenantDir(String tenantCode) { + return String.format("%s/%s", getHdfsDataBasePath(), tenantCode); + } + + + /** + * getAppAddress + * + * @param appAddress + * @param rmHa + * @return + */ + public static String getAppAddress(String appAddress, String rmHa) { + + //get active ResourceManager + String activeRM = YarnHAAdminUtils.getAcitveRMName(rmHa); + + String[] split1 = appAddress.split(Constants.DOUBLE_SLASH); + + if (split1.length != 2) { + return null; + } + + String start = split1[0] + Constants.DOUBLE_SLASH; + String[] split2 = split1[1].split(Constants.COLON); + + if (split2.length != 2) { + return null; + } + + String end = Constants.COLON + split2[1]; + + return start + activeRM + end; + } + + + @Override + public void close() throws IOException { + if (fs != null) { + try { + fs.close(); + } 
catch (IOException e) { + logger.error("Close HadoopUtils instance failed", e); + throw new IOException("Close HadoopUtils instance failed", e); + } + } + } + + + /** + * yarn ha admin utils + */ + private static final class YarnHAAdminUtils extends RMAdminCLI { + + private static final Logger logger = LoggerFactory.getLogger(YarnHAAdminUtils.class); + + /** + * get active resourcemanager + * + * @param rmIds + * @return + */ + public static String getAcitveRMName(String rmIds) { + + String[] rmIdArr = rmIds.split(Constants.COMMA); + + int activeResourceManagerPort = PropertyUtils.getInt(Constants.HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT, 8088); + + String yarnUrl = "http://%s:" + activeResourceManagerPort + "/ws/v1/cluster/info"; + + String state = null; + try { + /** + * send http get request to rm1 + */ + state = getRMState(String.format(yarnUrl, rmIdArr[0])); + + if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { + return rmIdArr[0]; + } else if (Constants.HADOOP_RM_STATE_STANDBY.equals(state)) { + state = getRMState(String.format(yarnUrl, rmIdArr[1])); + if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { + return rmIdArr[1]; + } + } else { + return null; + } + } catch (Exception e) { + state = getRMState(String.format(yarnUrl, rmIdArr[1])); + if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { + return rmIdArr[0]; + } + } + return null; + } + + + /** + * get ResourceManager state + * + * @param url + * @return + */ + public static String getRMState(String url) { + + String retStr = HttpUtils.get(url); + + if (StringUtils.isEmpty(retStr)) { + return null; + } + //to json + JSONObject jsonObject = JSON.parseObject(retStr); + + //get ResourceManager state + String state = jsonObject.getJSONObject("clusterInfo").getString("haState"); + return state; + } + + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java new file mode 100644 index 0000000000..684ea25270 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.http.HttpEntity; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; + +/** + * http utils + */ +public class HttpUtils { + + + public static final Logger logger = LoggerFactory.getLogger(HttpUtils.class); + + /** + * get http request content + * @param url + * @return http response + */ + public static String get(String url){ + CloseableHttpClient httpclient = HttpClients.createDefault(); + + HttpGet httpget = new HttpGet(url); + /** set timeout、request time、socket timeout */ + RequestConfig requestConfig = RequestConfig.custom().setConnectTimeout(Constants.HTTP_CONNECT_TIMEOUT) + .setConnectionRequestTimeout(Constants.HTTP_CONNECTION_REQUEST_TIMEOUT) + .setSocketTimeout(Constants.SOCKET_TIMEOUT) + .setRedirectsEnabled(true) + .build(); + httpget.setConfig(requestConfig); + String responseContent = null; + CloseableHttpResponse response = null; + + try { + response = httpclient.execute(httpget); + //check response status is 200 + if (response.getStatusLine().getStatusCode() == 200) { + HttpEntity entity = response.getEntity(); + if (entity != null) { + responseContent = EntityUtils.toString(entity, Constants.UTF_8); + }else{ + logger.warn("http entity is null"); + } + }else{ + logger.error("htt get:{} response status code is not 200!"); + } + }catch (Exception e){ + logger.error(e.getMessage(),e); + }finally { + try { + if (response != null) { + EntityUtils.consume(response.getEntity()); + response.close(); + } + } catch (IOException e) { + logger.error(e.getMessage(),e); + } + + if (httpget 
!= null && !httpget.isAborted()) { + httpget.releaseConnection(); + httpget.abort(); + } + + if (httpclient != null) { + try { + httpclient.close(); + } catch (IOException e) { + logger.error(e.getMessage(),e); + } + } + } + return responseContent; + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java new file mode 100644 index 0000000000..669560c7d4 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +/** + * http utils + */ +public class IpUtils { + + private static final Logger logger = LoggerFactory.getLogger(IpUtils.class); + public static final String DOT = "."; + + /** + * ip str to long

+ * + * @param ipStr ip string + */ + public static Long ipToLong(String ipStr) { + String[] ipSet = ipStr.split("\\" + DOT); + + return Long.parseLong(ipSet[0]) << 24 | Long.parseLong(ipSet[1]) << 16 | Long.parseLong(ipSet[2]) << 8 | Long.parseLong(ipSet[3]); + } + + /** + * long to ip + * @param ipLong the long number converted from IP + * @return String + */ + public static String longToIp(long ipLong) { + long[] ipNumbers = new long[4]; + long tmp = 0xFF; + ipNumbers[0] = ipLong >> 24 & tmp; + ipNumbers[1] = ipLong >> 16 & tmp; + ipNumbers[2] = ipLong >> 8 & tmp; + ipNumbers[3] = ipLong & tmp; + + StringBuilder sb = new StringBuilder(16); + sb.append(ipNumbers[0]).append(DOT) + .append(ipNumbers[1]).append(DOT) + .append(ipNumbers[2]).append(DOT) + .append(ipNumbers[3]); + return sb.toString(); + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java new file mode 100644 index 0000000000..7f7ce30d08 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java @@ -0,0 +1,271 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.TypeReference; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.*; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.*; + +/** + * json utils + */ +public class JSONUtils { + + private static final Logger logger = LoggerFactory.getLogger(JSONUtils.class); + + /** + * can use static singleton, inject: just make sure to reuse! + */ + private static final ObjectMapper objectMapper = new ObjectMapper(); + + /** + * init + */ + private static final JSONUtils instance = new JSONUtils(); + + + private JSONUtils() { + //Feature that determines whether encountering of unknown properties, false means not analyzer unknown properties + objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false).setTimeZone(TimeZone.getDefault()); + } + + /** + * json representation of object + * @param object + * @return object to json string + */ + public static String toJson(Object object) { + try{ + return JSONObject.toJSONString(object,false); + } catch (Exception e) { + logger.error("object to json exception!",e); + } + + return null; + } + + + /** + * + * This method deserializes the specified Json into an object of the specified class. It is not + * suitable to use if the specified class is a generic type since it will not have the generic + * type information because of the Type Erasure feature of Java. Therefore, this method should not + * be used if the desired type is a generic type. 
Note that this method works fine if the any of + * the fields of the specified object are generics, just the object itself should not be a + * generic type. + * + * @param json the string from which the object is to be deserialized + * @param clazz the class of T + * @return an object of type T from the string + * classOfT + */ + public static T parseObject(String json, Class clazz) { + if (StringUtils.isEmpty(json)) { + return null; + } + + try { + return JSONObject.parseObject(json, clazz); + } catch (Exception e) { + logger.error("parse object exception!",e); + } + return null; + } + + + /** + * json to list + * + * @param json + * @param clazz c + * @param + * @return + */ + public static List toList(String json, Class clazz) { + if (StringUtils.isEmpty(json)) { + return new ArrayList<>(); + } + try { + return JSONArray.parseArray(json, clazz); + } catch (Exception e) { + logger.error("JSONArray.parseArray exception!",e); + } + + return new ArrayList<>(); + } + + + + /** + * check json object valid + * + * @param json + * @return + */ + public static boolean checkJsonVaild(String json) { + + if (StringUtils.isEmpty(json)) { + return false; + } + + try { + objectMapper.readTree(json); + return true; + } catch (IOException e) { + logger.error("check json object valid exception!",e); + } + + return false; + } + + + /** + * Method for finding a JSON Object field with specified name in this + * node or its child nodes, and returning value it has. + * If no matching field is found in this node or its descendants, returns null. 
+ * + * @param fieldName Name of field to look for + * + * @return Value of first matching node found, if any; null if none + */ + public static String findValue(JsonNode jsonNode, String fieldName) { + JsonNode node = jsonNode.findValue(fieldName); + + if (node == null) { + return null; + } + + return node.toString(); + } + + + /** + * json to map + * + * {@link #toMap(String, Class, Class)} + * + * @param json + * @return + */ + public static Map toMap(String json) { + if (StringUtils.isEmpty(json)) { + return null; + } + + try { + return JSONObject.parseObject(json, new TypeReference>(){}); + } catch (Exception e) { + logger.error("json to map exception!",e); + } + + return null; + } + + /** + * + * json to map + * + *

+   *         String jsonStr = "{\"id\":\"1001\",\"name\":\"Jobs\"}";
+   *         Map models = JSONUtils.toMap(jsonStr, String.class,String.class);
+   * 
+ * @param json + * @param classK + * @param classV + * @param + * @param + * @return + */ + public static Map toMap(String json, Class classK, Class classV) { + if (StringUtils.isEmpty(json)) { + return null; + } + + try { + return JSONObject.parseObject(json, new TypeReference>() {}); + } catch (Exception e) { + logger.error("json to map exception!",e); + } + + return null; + } + + /** + * 对象装json字符串

+ * + * @return json string + */ + public static String toJsonString(Object object) { + try{ + return JSONObject.toJSONString(object,false); + } catch (Exception e) { + throw new RuntimeException("Json deserialization exception.", e); + } + } + + public static JSONObject parseObject(String text) { + try{ + return JSONObject.parseObject(text); + } catch (Exception e) { + throw new RuntimeException("Json deserialization exception.", e); + } + } + + public static JSONArray parseArray(String text) { + try{ + return JSONObject.parseArray(text); + } catch (Exception e) { + throw new RuntimeException("Json deserialization exception.", e); + } + } + + + + /** + * json serializer + */ + public static class JsonDataSerializer extends JsonSerializer { + + @Override + public void serialize(String value, JsonGenerator gen, SerializerProvider provider) throws IOException { + gen.writeRawValue(value); + } + + } + + /** + * json data deserializer + */ + public static class JsonDataDeserializer extends JsonDeserializer { + + @Override + public String deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + JsonNode node = p.getCodec().readTree(p); + return node.toString(); + } + + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java new file mode 100644 index 0000000000..46b5b9670d --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java @@ -0,0 +1,297 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.shell.ShellExecutor; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import oshi.SystemInfo; +import oshi.hardware.CentralProcessor; +import oshi.hardware.GlobalMemory; +import oshi.hardware.HardwareAbstractionLayer; + +import java.io.BufferedReader; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.lang.management.ManagementFactory; +import java.lang.management.RuntimeMXBean; +import java.math.RoundingMode; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.text.DecimalFormat; +import java.util.ArrayList; +import java.util.List; + +/** + * os utils + * + */ +public class OSUtils { + + private static final Logger logger = LoggerFactory.getLogger(OSUtils.class); + + private static final SystemInfo SI = new SystemInfo(); + public static final String TWO_DECIMAL = "0.00"; + + private static HardwareAbstractionLayer hal = SI.getHardware(); + + private OSUtils() {} + + + /** + * get memory usage + * Keep 2 decimal + * @return percent % + */ + public static double memoryUsage() { + GlobalMemory memory = hal.getMemory(); + double memoryUsage = (memory.getTotal() - memory.getAvailable() - memory.getSwapUsed()) * 0.1 / memory.getTotal() * 10; + + DecimalFormat df = new DecimalFormat(TWO_DECIMAL); + 
df.setRoundingMode(RoundingMode.HALF_UP); + return Double.parseDouble(df.format(memoryUsage)); + } + + + /** + * get available physical memory size + * + * Keep 2 decimal + * @return available Physical Memory Size, unit: G + */ + public static double availablePhysicalMemorySize() { + GlobalMemory memory = hal.getMemory(); + double availablePhysicalMemorySize = (memory.getAvailable() + memory.getSwapUsed()) /1024.0/1024/1024; + + DecimalFormat df = new DecimalFormat(TWO_DECIMAL); + df.setRoundingMode(RoundingMode.HALF_UP); + return Double.parseDouble(df.format(availablePhysicalMemorySize)); + + } + + /** + * get total physical memory size + * + * Keep 2 decimal + * @return available Physical Memory Size, unit: G + */ + public static double totalMemorySize() { + GlobalMemory memory = hal.getMemory(); + double availablePhysicalMemorySize = memory.getTotal() /1024.0/1024/1024; + + DecimalFormat df = new DecimalFormat(TWO_DECIMAL); + df.setRoundingMode(RoundingMode.HALF_UP); + return Double.parseDouble(df.format(availablePhysicalMemorySize)); + } + + + /** + * load average + * + * @return + */ + public static double loadAverage() { + double loadAverage = hal.getProcessor().getSystemLoadAverage(); + + DecimalFormat df = new DecimalFormat(TWO_DECIMAL); + + df.setRoundingMode(RoundingMode.HALF_UP); + return Double.parseDouble(df.format(loadAverage)); + } + + /** + * get cpu usage + * + * @return + */ + public static double cpuUsage() { + CentralProcessor processor = hal.getProcessor(); + double cpuUsage = processor.getSystemCpuLoad(); + + DecimalFormat df = new DecimalFormat(TWO_DECIMAL); + df.setRoundingMode(RoundingMode.HALF_UP); + + return Double.parseDouble(df.format(cpuUsage)); + } + + + /** + * get user list + * + * @return + */ + public static List getUserList() { + List userList = new ArrayList<>(); + BufferedReader bufferedReader = null; + + try { + bufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream("/etc/passwd"))); + String line; + + 
while ((line = bufferedReader.readLine()) != null) { + if (line.contains(":")) { + String[] userInfo = line.split(":"); + userList.add(userInfo[0]); + } + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } finally { + try { + bufferedReader.close(); + } catch (IOException e) { + logger.error(e.getMessage(), e); + } + } + + return userList; + } + + /** + * get system group information + * @return + * @throws IOException + */ + public static String getGroup() throws IOException { + String result = exeCmd("groups"); + + if (StringUtils.isNotEmpty(result)) { + String[] groupInfo = StringUtils.split(result); + return groupInfo[0]; + } + + return null; + } + + /** + * Execute the corresponding command of Linux or Windows + * + * @param command + * @return + * @throws IOException + */ + public static String exeCmd(String command) throws IOException { + BufferedReader br = null; + + try { + Process p = Runtime.getRuntime().exec(command); + br = new BufferedReader(new InputStreamReader(p.getInputStream())); + String line; + StringBuilder sb = new StringBuilder(); + + while ((line = br.readLine()) != null) { + sb.append(line + "\n"); + } + + return sb.toString(); + } finally { + if (br != null) { + try { + br.close(); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + } + } + + /** + * Execute the shell + * @param command + * @return + * @throws IOException + */ + public static String exeShell(String command) throws IOException { + return ShellExecutor.execCommand(command); + } + + /** + * get process id + * @return + */ + public static int getProcessID() { + RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean(); + return Integer.parseInt(runtimeMXBean.getName().split("@")[0]); + } + + /** + * get local host + * @return + */ + public static String getHost(){ + try { + return InetAddress.getLocalHost().getHostAddress(); + } catch (UnknownHostException e) { + logger.error(e.getMessage(),e); + } + return null; + } + + + /** + * 
whether is macOS + */ + public static boolean isMacOS() { + String os = System.getProperty("os.name"); + return os.startsWith("Mac"); + } + + + /** + * whether is windows + */ + public static boolean isWindows() { + String os = System.getProperty("os.name"); + return os.startsWith("Windows"); + } + + + /** + * check memory and cpu usage + * @param conf + * @return + */ + public static Boolean checkResource(Configuration conf, Boolean isMaster){ + double systemCpuLoad; + double systemReservedMemory; + + if(isMaster){ + systemCpuLoad = conf.getDouble(Constants.MASTER_MAX_CPULOAD_AVG, Constants.defaultMasterCpuLoad); + systemReservedMemory = conf.getDouble(Constants.MASTER_RESERVED_MEMORY, Constants.defaultMasterReservedMemory); + }else{ + systemCpuLoad = conf.getDouble(Constants.WORKER_MAX_CPULOAD_AVG, Constants.defaultWorkerCpuLoad); + systemReservedMemory = conf.getDouble(Constants.WORKER_RESERVED_MEMORY, Constants.defaultWorkerReservedMemory); + } + + // judging usage + double loadAverage = OSUtils.loadAverage(); + // + double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); + + if(loadAverage > systemCpuLoad || availablePhysicalMemorySize < systemReservedMemory){ + logger.warn("load or availablePhysicalMemorySize(G) is too high, it's availablePhysicalMemorySize(G):{},loadAvg:{}", availablePhysicalMemorySize , loadAverage); + return false; + }else{ + return true; + } + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java new file mode 100644 index 0000000000..c139ec7990 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java @@ -0,0 +1,176 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.DataType; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; +import org.apache.dolphinscheduler.common.utils.placeholder.PlaceholderUtils; +import org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils; +import com.alibaba.fastjson.JSONObject; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.time.DateUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.PreparedStatement; +import java.text.ParseException; +import java.util.*; + +/** + * parameter parse utils + */ +public class ParameterUtils { + + private static final Logger logger = LoggerFactory.getLogger(ParameterUtils.class); + + /** + * convert parameters place holders + * + * @param parameterString + * @param parameterMap + * @return + */ + public static String convertParameterPlaceholders(String parameterString, Map parameterMap) { + if (StringUtils.isEmpty(parameterString)) { + return parameterString; + } + + //Get current time, schedule 
execute time + String cronTimeStr = parameterMap.get(Constants.PARAMETER_DATETIME); + + Date cronTime = null; + + if (StringUtils.isNotEmpty(cronTimeStr)) { + try { + cronTime = DateUtils.parseDate(cronTimeStr, new String[]{Constants.PARAMETER_FORMAT_TIME}); + } catch (ParseException e) { + logger.error(String.format("parse %s exception", cronTimeStr), e); + } + } else { + cronTime = new Date(); + } + + // replace variable ${} form,refers to the replacement of system variables and custom variables + parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true); + + // replace time $[...] form, eg. $[yyyyMMdd] + if (cronTime != null) { + parameterString = TimePlaceholderUtils.replacePlaceholders(parameterString, cronTime, true); + } + + return parameterString; + } + + /** + * set in parameter + * @param index + * @param stmt + * @param dataType + * @param value + * @throws Exception + */ + public static void setInParameter(int index, PreparedStatement stmt, DataType dataType, String value)throws Exception{ + if (dataType.equals(DataType.VARCHAR)){ + stmt.setString(index,value); + }else if (dataType.equals(DataType.INTEGER)){ + stmt.setInt(index, Integer.parseInt(value)); + }else if (dataType.equals(DataType.LONG)){ + stmt.setLong(index, Long.parseLong(value)); + }else if (dataType.equals(DataType.FLOAT)){ + stmt.setFloat(index, Float.parseFloat(value)); + }else if (dataType.equals(DataType.DOUBLE)){ + stmt.setDouble(index, Double.parseDouble(value)); + }else if (dataType.equals(DataType.DATE)){ + stmt.setString(index, value); + }else if (dataType.equals(DataType.TIME)){ + stmt.setString(index, value); + }else if (dataType.equals(DataType.TIMESTAMP)){ + stmt.setString(index, value); + }else if (dataType.equals(DataType.BOOLEAN)){ + stmt.setBoolean(index,Boolean.parseBoolean(value)); + } + } + + /** + * curing user define parameters + * + * @return + */ + public static String curingGlobalParams(Map globalParamMap, List globalParamList, + 
CommandType commandType, Date scheduleTime){ + Map globalMap = new HashMap<>(); + if(globalParamMap!= null){ + globalMap.putAll(globalParamMap); + } + Map allParamMap = new HashMap<>(); + //如果是补数,需要传入一个补数时间,根据任务类型 + Map timeParams = BusinessTimeUtils + .getBusinessTime(commandType, scheduleTime); + + if (timeParams != null) { + allParamMap.putAll(timeParams); + } + + if (globalMap != null) { + allParamMap.putAll(globalMap); + } + + Set> entries = allParamMap.entrySet(); + + Map resolveMap = new HashMap<>(); + for (Map.Entry entry : entries){ + String val = entry.getValue(); + if (val.startsWith("$")){ + String str = ParameterUtils.convertParameterPlaceholders(val, allParamMap); + resolveMap.put(entry.getKey(),str); + } + } + + if (globalMap != null){ + globalMap.putAll(resolveMap); + } + + if (globalParamList != null && globalParamList.size() > 0){ + + for (Property property : globalParamList){ + String val = globalMap.get(property.getProp()); + if (val != null){ + property.setValue(val); + } + } + return JSONObject.toJSONString(globalParamList); + } + return null; + } + + + /** + * handle escapes + * @param inputString + * @return + */ + public static String handleEscapes(String inputString){ + + if(StringUtils.isNotEmpty(inputString)){ + return inputString.replace("%", "////%"); + } + return inputString; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java new file mode 100644 index 0000000000..c775ce77bc --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java @@ -0,0 +1,215 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ResUploadType; +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import static org.apache.dolphinscheduler.common.Constants.COMMON_PROPERTIES_PATH; +import static org.apache.dolphinscheduler.common.Constants.HADOOP_PROPERTIES_PATH; + +/** + * property utils + * single instance + */ +public class PropertyUtils { + + /** + * logger + */ + private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class); + + private static final Properties properties = new Properties(); + + private static final PropertyUtils propertyUtils = new PropertyUtils(); + + private PropertyUtils(){ + init(); + } + + private void init(){ + String[] propertyFiles = new String[]{HADOOP_PROPERTIES_PATH,COMMON_PROPERTIES_PATH}; + for (String fileName : propertyFiles) { + InputStream fis = null; + try { + fis = PropertyUtils.class.getResourceAsStream(fileName); + properties.load(fis); + + } catch (IOException e) { + logger.error(e.getMessage(), e); + System.exit(1); + } finally { + IOUtils.closeQuietly(fis); + } + } + } + + /** + * judge whether resource upload 
startup + * @return + */ + public static Boolean getResUploadStartupState(){ + String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE); + ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); + return resUploadType == ResUploadType.HDFS || resUploadType == ResUploadType.S3; + } + + /** + * get property value + * + * @param key property name + * @return + */ + public static String getString(String key) { + return properties.getProperty(key.trim()); + } + + /** + * get property value + * + * @param key property name + * @return get property int value , if key == null, then return -1 + */ + public static int getInt(String key) { + return getInt(key, -1); + } + + /** + * + * @param key + * @param defaultValue + * @return + */ + public static int getInt(String key, int defaultValue) { + String value = getString(key); + if (value == null) { + return defaultValue; + } + + try { + return Integer.parseInt(value); + } catch (NumberFormatException e) { + logger.info(e.getMessage(),e); + } + return defaultValue; + } + + /** + * get property value + * + * @param key property name + * @return + */ + public static Boolean getBoolean(String key) { + String value = properties.getProperty(key.trim()); + if(null != value){ + return Boolean.parseBoolean(value); + } + + return null; + } + + /** + * get property long value + * @param key + * @param defaultVal + * @return + */ + public static long getLong(String key, long defaultVal) { + String val = getString(key); + return val == null ? defaultVal : Long.parseLong(val); + } + + /** + * + * @param key + * @return + */ + public static long getLong(String key) { + return getLong(key,-1); + } + + /** + * + * @param key + * @param defaultVal + * @return + */ + public double getDouble(String key, double defaultVal) { + String val = getString(key); + return val == null ? 
defaultVal : Double.parseDouble(val); + } + + + /** + * get array + * @param key property name + * @param splitStr separator + * @return + */ + public static String[] getArray(String key, String splitStr) { + String value = getString(key); + if (value == null) { + return null; + } + try { + String[] propertyArray = value.split(splitStr); + return propertyArray; + } catch (NumberFormatException e) { + logger.info(e.getMessage(),e); + } + return null; + } + + /** + * + * @param key + * @param type + * @param defaultValue + * @param + * @return get enum value + */ + public > T getEnum(String key, Class type, + T defaultValue) { + String val = getString(key); + return val == null ? defaultValue : Enum.valueOf(type, val); + } + + /** + * get all properties with specified prefix, like: fs. + * @param prefix prefix to search + * @return + */ + public static Map getPrefixedProperties(String prefix) { + Map matchedProperties = new HashMap<>(); + for (String propName : properties.stringPropertyNames()) { + if (propName.startsWith(prefix)) { + matchedProperties.put(propName, properties.getProperty(propName)); + } + } + return matchedProperties; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java new file mode 100644 index 0000000000..e83dba71fb --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.common.utils;

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.model.MasterServer;

import java.util.Date;

/**
 * Heartbeat payload for the ZooKeeper register: CPU usage, memory usage and
 * (optionally) load average, plus helpers to build/parse the comma-separated
 * heartbeat string written to ZK.
 */
public class ResInfo {

    /**
     * cpuUsage
     */
    private double cpuUsage;

    /**
     * memoryUsage
     */
    private double memoryUsage;

    /**
     * loadAverage
     */
    private double loadAverage;

    public ResInfo() {}

    public ResInfo(double cpuUsage, double memoryUsage) {
        this.cpuUsage = cpuUsage;
        this.memoryUsage = memoryUsage;
    }

    public ResInfo(double cpuUsage, double memoryUsage, double loadAverage) {
        this.cpuUsage = cpuUsage;
        this.memoryUsage = memoryUsage;
        this.loadAverage = loadAverage;
    }

    public double getCpuUsage() {
        return cpuUsage;
    }

    public void setCpuUsage(double cpuUsage) {
        this.cpuUsage = cpuUsage;
    }

    public double getMemoryUsage() {
        return memoryUsage;
    }

    public void setMemoryUsage(double memoryUsage) {
        this.memoryUsage = memoryUsage;
    }

    public double getLoadAverage() {
        return loadAverage;
    }

    public void setLoadAverage(double loadAverage) {
        this.loadAverage = loadAverage;
    }

    /**
     * Current CPU/memory usage plus load average, as JSON
     * (load average included for service monitoring).
     *
     * @return JSON string, or null when serialization fails (see JSONUtils.toJson)
     */
    public static String getResInfoJson() {
        ResInfo resInfo = new ResInfo(OSUtils.cpuUsage(), OSUtils.memoryUsage(), OSUtils.loadAverage());
        return JSONUtils.toJson(resInfo);
    }


    /**
     * Given CPU and memory usage, as JSON (no load average).
     *
     * @return JSON string, or null when serialization fails
     */
    public static String getResInfoJson(double cpuUsage, double memoryUsage) {
        ResInfo resInfo = new ResInfo(cpuUsage, memoryUsage);
        return JSONUtils.toJson(resInfo);
    }


    /**
     * Build the ZK heartbeat string for this host/process at the given instant
     * (create time and last-heartbeat time are both set to 'now').
     */
    public static String getHeartBeatInfo(Date now) {
        return buildHeartbeatForZKInfo(OSUtils.getHost(),
                OSUtils.getProcessID(),
                OSUtils.cpuUsage(),
                OSUtils.memoryUsage(),
                DateUtils.dateToString(now),
                DateUtils.dateToString(now));
    }

    /**
     * Build the comma-separated heartbeat string written to ZK:
     * host,port,cpuUsage,memoryUsage,createTime,lastHeartbeatTime
     *
     * @param host              server host
     * @param port              server port (the process id in practice — see getHeartBeatInfo)
     * @param cpuUsage          CPU usage
     * @param memoryUsage       memory usage
     * @param createTime        creation timestamp string
     * @param lastHeartbeatTime last heartbeat timestamp string
     */
    public static String buildHeartbeatForZKInfo(String host, int port,
                                                 double cpuUsage, double memoryUsage,
                                                 String createTime, String lastHeartbeatTime) {

        return host + Constants.COMMA + port + Constants.COMMA
                + cpuUsage + Constants.COMMA
                + memoryUsage + Constants.COMMA
                + createTime + Constants.COMMA
                + lastHeartbeatTime;
    }

    /**
     * Parse a ZK heartbeat string back into a MasterServer.
     *
     * @param heartBeatInfo comma-separated heartbeat string
     * @return populated MasterServer, or null when the string does not have
     *         exactly 6 fields
     */
    public static MasterServer parseHeartbeatForZKInfo(String heartBeatInfo) {
        String[] masterArray = heartBeatInfo.split(Constants.COMMA);
        if (masterArray.length != 6) {
            return null;
        }
        MasterServer masterServer = new MasterServer();
        masterServer.setHost(masterArray[0]);
        masterServer.setPort(Integer.parseInt(masterArray[1]));
        masterServer.setResInfo(getResInfoJson(Double.parseDouble(masterArray[2]), Double.parseDouble(masterArray[3])));
        masterServer.setCreateTime(DateUtils.stringToDate(masterArray[4]));
        masterServer.setLastHeartbeatTime(DateUtils.stringToDate(masterArray[5]));
        return masterServer;
    }

}
diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SchemaUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SchemaUtils.java
new file mode 100644
index 0000000000..4d0621beeb
--- /dev/null
+++
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SchemaUtils.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Metadata related common classes + * + */ +public class SchemaUtils { + + private static final Logger logger = LoggerFactory.getLogger(SchemaUtils.class); + private static Pattern p = Pattern.compile("\\s*|\t|\r|\n"); + + /** + * 获取所有upgrade目录下的可升级的schema + * Gets upgradable schemas for all upgrade directories + * @return + */ + @SuppressWarnings("unchecked") + public static List getAllSchemaList() { + List schemaDirList = new ArrayList<>(); + File[] schemaDirArr = FileUtils.getAllDir("sql/upgrade"); + if(schemaDirArr == null || schemaDirArr.length == 0) { + return null; + } + + for(File 
file : schemaDirArr) { + schemaDirList.add(file.getName()); + } + + Collections.sort(schemaDirList , new Comparator() { + @Override + public int compare(Object o1 , Object o2){ + try { + String dir1 = String.valueOf(o1); + String dir2 = String.valueOf(o2); + String version1 = dir1.split("_")[0]; + String version2 = dir2.split("_")[0]; + if(version1.equals(version2)) { + return 0; + } + + if(SchemaUtils.isAGreatVersion(version1, version2)) { + return 1; + } + + return -1; + + } catch (Exception e) { + logger.error(e.getMessage(),e); + throw new RuntimeException(e); + } + } + }); + + return schemaDirList; + } + + /** + * 判断schemaVersion是否比version版本高 + * Determine whether schemaVersion is higher than version + * @param schemaVersion + * @param version + * @return + */ + public static boolean isAGreatVersion(String schemaVersion, String version) { + if(StringUtils.isEmpty(schemaVersion) || StringUtils.isEmpty(version)) { + throw new RuntimeException("schemaVersion or version is empty"); + } + + String[] schemaVersionArr = schemaVersion.split("\\."); + String[] versionArr = version.split("\\."); + int arrLength = schemaVersionArr.length < versionArr.length ? 
schemaVersionArr.length : versionArr.length; + for(int i = 0 ; i < arrLength ; i++) { + if(Integer.valueOf(schemaVersionArr[i]) > Integer.valueOf(versionArr[i])) { + return true; + }else if(Integer.valueOf(schemaVersionArr[i]) < Integer.valueOf(versionArr[i])) { + return false; + } + } + + // 说明直到第arrLength-1个元素,两个版本号都一样,此时谁的arrLength大,谁的版本号就大 + // If the version and schema version is the same from 0 up to the arrlength-1 element,whoever has a larger arrLength has a larger version number + return schemaVersionArr.length > versionArr.length; + } + + /** + * Gets the current software version number of the system + * @return + */ + public static String getSoftVersion() { + String soft_version; + try { + soft_version = FileUtils.readFile2Str(new FileInputStream(new File("sql/soft_version"))); + soft_version = replaceBlank(soft_version); + } catch (FileNotFoundException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException("Failed to get the product version description file. The file could not be found", e); + } catch (IOException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException("Failed to get product version number description file, failed to read the file", e); + } + return soft_version; + } + + /** + * 去掉字符串中的空格回车换行和制表符 + * Strips the string of space carriage returns and tabs + * @param str + * @return + */ + public static String replaceBlank(String str) { + String dest = ""; + if (str!=null) { + + Matcher m = p.matcher(str); + dest = m.replaceAll(""); + } + return dest; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java new file mode 100644 index 0000000000..f30b20f6b2 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java @@ -0,0 +1,317 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or 
more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.LineNumberReader; +import java.io.Reader; +import java.sql.*; + +/* + * Slightly modified version of the com.ibatis.common.jdbc.ScriptRunner class + * from the iBATIS Apache project. Only removed dependency on Resource class + * and a constructor + */ +/* + * Copyright 2004 Clinton Begin + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/** + * Tool to run database scripts + */ +public class ScriptRunner { + + public static final Logger logger = LoggerFactory.getLogger(ScriptRunner.class); + + private static final String DEFAULT_DELIMITER = ";"; + + private Connection connection; + + private boolean stopOnError; + private boolean autoCommit; + + private String delimiter = DEFAULT_DELIMITER; + private boolean fullLineDelimiter = false; + + /** + * Default constructor + */ + public ScriptRunner(Connection connection, boolean autoCommit, boolean stopOnError) { + this.connection = connection; + this.autoCommit = autoCommit; + this.stopOnError = stopOnError; + } + + public static void main(String[] args) { + String dbName = "db_mmu"; + String appKey = dbName.substring(dbName.lastIndexOf("_")+1, dbName.length()); + System.out.println(appKey); + } + + public void setDelimiter(String delimiter, boolean fullLineDelimiter) { + this.delimiter = delimiter; + this.fullLineDelimiter = fullLineDelimiter; + } + + /** + * Runs an SQL script (read in using the Reader parameter) + * + * @param reader + * - the source of the script + */ + public void runScript(Reader reader) throws IOException, SQLException { + try { + boolean originalAutoCommit = connection.getAutoCommit(); + try { + if (originalAutoCommit != this.autoCommit) { + connection.setAutoCommit(this.autoCommit); + } + runScript(connection, reader); + } finally { + connection.setAutoCommit(originalAutoCommit); + } + } catch (IOException e) { + throw e; + } catch (SQLException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException("Error running script. 
Cause: " + e, e); + } + } + + public void runScript(Reader reader, String dbName) throws IOException, SQLException { + try { + boolean originalAutoCommit = connection.getAutoCommit(); + try { + if (originalAutoCommit != this.autoCommit) { + connection.setAutoCommit(this.autoCommit); + } + runScript(connection, reader, dbName); + } finally { + connection.setAutoCommit(originalAutoCommit); + } + } catch (IOException e) { + throw e; + } catch (SQLException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException("Error running script. Cause: " + e, e); + } + } + + /** + * Runs an SQL script (read in using the Reader parameter) using the connection + * passed in + * + * @param conn + * - the connection to use for the script + * @param reader + * - the source of the script + * @throws SQLException + * if any SQL errors occur + * @throws IOException + * if there is an error reading from the Reader + */ + private void runScript(Connection conn, Reader reader) throws IOException, SQLException { + StringBuffer command = null; + try { + LineNumberReader lineReader = new LineNumberReader(reader); + String line = null; + while ((line = lineReader.readLine()) != null) { + if (command == null) { + command = new StringBuffer(); + } + String trimmedLine = line.trim(); + if (trimmedLine.startsWith("--")) { + logger.info(trimmedLine); + } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("//")) { + // Do nothing + } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("--")) { + // Do nothing + + } else if (trimmedLine.startsWith("delimiter")) { + String newDelimiter = trimmedLine.split(" ")[1]; + this.setDelimiter(newDelimiter, fullLineDelimiter); + + } else if (!fullLineDelimiter && trimmedLine.endsWith(getDelimiter()) + || fullLineDelimiter && trimmedLine.equals(getDelimiter())) { + command.append(line.substring(0, line.lastIndexOf(getDelimiter()))); + command.append(" "); + Statement statement = conn.createStatement(); + + // 
logger.info(command.toString()); + + boolean hasResults = false; + logger.info("sql:"+command.toString()); + if (stopOnError) { + hasResults = statement.execute(command.toString()); + } else { + try { + statement.execute(command.toString()); + } catch (SQLException e) { + logger.error(e.getMessage(),e); + throw e; + } + } + + ResultSet rs = statement.getResultSet(); + if (hasResults && rs != null) { + ResultSetMetaData md = rs.getMetaData(); + int cols = md.getColumnCount(); + for (int i = 0; i < cols; i++) { + String name = md.getColumnLabel(i); + logger.info(name + "\t"); + } + logger.info(""); + while (rs.next()) { + for (int i = 0; i < cols; i++) { + String value = rs.getString(i); + logger.info(value + "\t"); + } + logger.info(""); + } + } + + command = null; + try { + statement.close(); + } catch (Exception e) { + // Ignore to workaround a bug in Jakarta DBCP + } + Thread.yield(); + } else { + command.append(line); + command.append(" "); + } + } + + } catch (SQLException e) { + logger.error("Error executing: " + command.toString()); + throw e; + } catch (IOException e) { + e.fillInStackTrace(); + logger.error("Error executing: " + command.toString()); + throw e; + } + } + + private void runScript(Connection conn, Reader reader , String dbName) throws IOException, SQLException { + StringBuffer command = null; + String sql = ""; + String appKey = dbName.substring(dbName.lastIndexOf("_")+1, dbName.length()); + try { + LineNumberReader lineReader = new LineNumberReader(reader); + String line = null; + while ((line = lineReader.readLine()) != null) { + if (command == null) { + command = new StringBuffer(); + } + String trimmedLine = line.trim(); + if (trimmedLine.startsWith("--")) { + logger.info(trimmedLine); + } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("//")) { + // Do nothing + } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("--")) { + // Do nothing + + } else if (trimmedLine.startsWith("delimiter")) { + String newDelimiter = 
trimmedLine.split(" ")[1]; + this.setDelimiter(newDelimiter, fullLineDelimiter); + + } else if (!fullLineDelimiter && trimmedLine.endsWith(getDelimiter()) + || fullLineDelimiter && trimmedLine.equals(getDelimiter())) { + command.append(line.substring(0, line.lastIndexOf(getDelimiter()))); + command.append(" "); + Statement statement = conn.createStatement(); + + // logger.info(command.toString()); + + sql = command.toString().replaceAll("\\{\\{APPDB\\}\\}", dbName); + boolean hasResults = false; + logger.info("sql:"+sql); + if (stopOnError) { + hasResults = statement.execute(sql); + } else { + try { + statement.execute(sql); + } catch (SQLException e) { + logger.error(e.getMessage(),e); + throw e; + } + } + + ResultSet rs = statement.getResultSet(); + if (hasResults && rs != null) { + ResultSetMetaData md = rs.getMetaData(); + int cols = md.getColumnCount(); + for (int i = 0; i < cols; i++) { + String name = md.getColumnLabel(i); + logger.info(name + "\t"); + } + logger.info(""); + while (rs.next()) { + for (int i = 0; i < cols; i++) { + String value = rs.getString(i); + logger.info(value + "\t"); + } + logger.info(""); + } + } + + command = null; + try { + statement.close(); + } catch (Exception e) { + // Ignore to workaround a bug in Jakarta DBCP + } + Thread.yield(); + } else { + command.append(line); + command.append(" "); + } + } + + } catch (SQLException e) { + logger.error("Error executing: " + sql); + throw e; + } catch (IOException e) { + e.fillInStackTrace(); + logger.error("Error executing: " + sql); + throw e; + } + } + + private String getDelimiter() { + return delimiter; + } + +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java new file mode 100644 index 0000000000..c5cbc78efd --- /dev/null +++ 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; +import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; +import org.apache.dolphinscheduler.common.task.http.HttpParameters; +import org.apache.dolphinscheduler.common.task.mr.MapreduceParameters; +import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters; +import org.apache.dolphinscheduler.common.task.python.PythonParameters; +import org.apache.dolphinscheduler.common.task.shell.ShellParameters; +import org.apache.dolphinscheduler.common.task.spark.SparkParameters; +import org.apache.dolphinscheduler.common.task.sql.SqlParameters; +import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters; +import org.apache.commons.lang3.EnumUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +/** + * task parameters utils + */ +public 
class TaskParametersUtils { + + private static Logger logger = LoggerFactory.getLogger(TaskParametersUtils.class); + + /** + * get task parameters + * @param taskType + * @param parameter + * @return task parameters + */ + public static AbstractParameters getParameters(String taskType, String parameter) { + try { + switch (EnumUtils.getEnum(TaskType.class,taskType)) { + case SUB_PROCESS: + return JSONUtils.parseObject(parameter, SubProcessParameters.class); + case SHELL: + return JSONUtils.parseObject(parameter, ShellParameters.class); + case PROCEDURE: + return JSONUtils.parseObject(parameter, ProcedureParameters.class); + case SQL: + return JSONUtils.parseObject(parameter, SqlParameters.class); + case MR: + return JSONUtils.parseObject(parameter, MapreduceParameters.class); + case SPARK: + return JSONUtils.parseObject(parameter, SparkParameters.class); + case PYTHON: + return JSONUtils.parseObject(parameter, PythonParameters.class); + case DEPENDENT: + return JSONUtils.parseObject(parameter, DependentParameters.class); + case FLINK: + return JSONUtils.parseObject(parameter, FlinkParameters.class); + case HTTP: + return JSONUtils.parseObject(parameter, HttpParameters.class); + default: + return null; + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + return null; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/dependent/DependentDateUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/dependent/DependentDateUtils.java new file mode 100644 index 0000000000..574343d0cb --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/dependent/DependentDateUtils.java @@ -0,0 +1,169 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.common.utils.dependent;

import org.apache.dolphinscheduler.common.model.DateInterval;
import org.apache.dolphinscheduler.common.utils.DateUtils;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * Builds lists of {@link DateInterval} (start-of-period .. end-of-period pairs)
 * relative to a business date, for dependent-task date range checks.
 */
public class DependentDateUtils {

    /** Utility class: no instances. */
    private DependentDateUtils() {
    }

    /**
     * Gets one interval per hour for the last {@code hourNumber} hours.
     *
     * @param businessDate reference date
     * @param hourNumber how many hours to go back
     * @return one full-hour interval per hour, oldest first
     */
    public static List<DateInterval> getLastHoursInterval(Date businessDate, int hourNumber) {
        List<DateInterval> dateIntervals = new ArrayList<>();
        for (int index = hourNumber; index > 0; index--) {
            // NOTE(review): relies on DateUtils.getSomeHourOfDay interpreting a
            // positive offset as "index hours back" — confirm against that helper
            Date lastHour = DateUtils.getSomeHourOfDay(businessDate, index);
            Date beginTime = DateUtils.getStartOfHour(lastHour);
            Date endTime = DateUtils.getEndOfHour(lastHour);
            dateIntervals.add(new DateInterval(beginTime, endTime));
        }
        return dateIntervals;
    }

    /**
     * Gets the single whole-day interval covering the business date.
     *
     * @param businessDate reference date
     * @return a one-element list with the start/end of that day
     */
    public static List<DateInterval> getTodayInterval(Date businessDate) {
        List<DateInterval> dateIntervals = new ArrayList<>();
        Date beginTime = DateUtils.getStartOfDay(businessDate);
        Date endTime = DateUtils.getEndOfDay(businessDate);
        dateIntervals.add(new DateInterval(beginTime, endTime));
        return dateIntervals;
    }

    /**
     * Gets one whole-day interval per day for the {@code someDay} days before
     * the business date (the business date itself is excluded).
     *
     * @param businessDate reference date
     * @param someDay how many days to go back
     * @return one interval per day, oldest first
     */
    public static List<DateInterval> getLastDayInterval(Date businessDate, int someDay) {
        List<DateInterval> dateIntervals = new ArrayList<>();
        for (int index = someDay; index > 0; index--) {
            Date lastDay = DateUtils.getSomeDay(businessDate, -index);
            Date beginTime = DateUtils.getStartOfDay(lastDay);
            Date endTime = DateUtils.getEndOfDay(lastDay);
            dateIntervals.add(new DateInterval(beginTime, endTime));
        }
        return dateIntervals;
    }

    /**
     * Gets the intervals between this month's first day and the business date.
     *
     * @param businessDate reference date
     * @return daily intervals from the first of the month through businessDate
     */
    public static List<DateInterval> getThisMonthInterval(Date businessDate) {
        Date firstDay = DateUtils.getFirstDayOfMonth(businessDate);
        return getDateIntervalListBetweenTwoDates(firstDay, businessDate);
    }

    /**
     * Gets the intervals between last month's first and last day.
     *
     * @param businessDate reference date
     * @return daily intervals covering the whole previous month
     */
    public static List<DateInterval> getLastMonthInterval(Date businessDate) {
        Date firstDayThisMonth = DateUtils.getFirstDayOfMonth(businessDate);
        Date lastDay = DateUtils.getSomeDay(firstDayThisMonth, -1);
        Date firstDay = DateUtils.getFirstDayOfMonth(lastDay);
        return getDateIntervalListBetweenTwoDates(firstDay, lastDay);
    }

    /**
     * Gets the interval on the first or last day of the previous month.
     *
     * @param businessDate reference date
     * @param isBeginDay true for the first day of last month, false for the last day
     * @return a one-element list with that day's interval
     */
    public static List<DateInterval> getLastMonthBeginInterval(Date businessDate, boolean isBeginDay) {
        Date firstDayThisMonth = DateUtils.getFirstDayOfMonth(businessDate);
        Date lastDay = DateUtils.getSomeDay(firstDayThisMonth, -1);
        Date firstDay = DateUtils.getFirstDayOfMonth(lastDay);
        if (isBeginDay) {
            return getDateIntervalListBetweenTwoDates(firstDay, firstDay);
        } else {
            return getDateIntervalListBetweenTwoDates(lastDay, lastDay);
        }
    }

    /**
     * Gets the intervals between Monday of this week and the business date.
     *
     * @param businessDate reference date
     * @return daily intervals from Monday through businessDate
     */
    public static List<DateInterval> getThisWeekInterval(Date businessDate) {
        Date mondayThisWeek = DateUtils.getMonday(businessDate);
        return getDateIntervalListBetweenTwoDates(mondayThisWeek, businessDate);
    }

    /**
     * Gets the intervals from Monday to Sunday of last week.
     * Monday is treated as the first day of the week.
     *
     * @param businessDate reference date
     * @return daily intervals covering the whole previous week
     */
    public static List<DateInterval> getLastWeekInterval(Date businessDate) {
        Date mondayThisWeek = DateUtils.getMonday(businessDate);
        Date sunday = DateUtils.getSomeDay(mondayThisWeek, -1);
        Date monday = DateUtils.getMonday(sunday);
        return getDateIntervalListBetweenTwoDates(monday, sunday);
    }

    /**
     * Gets the interval on a specific weekday of last week.
     * Monday is treated as the first day of the week.
     *
     * @param businessDate reference date
     * @param dayOfWeek monday:1, tuesday:2, wednesday:3, thursday:4, friday:5, saturday:6, sunday:7
     * @return a one-element list with that day's interval
     */
    public static List<DateInterval> getLastWeekOneDayInterval(Date businessDate, int dayOfWeek) {
        Date mondayThisWeek = DateUtils.getMonday(businessDate);
        Date sunday = DateUtils.getSomeDay(mondayThisWeek, -1);
        Date monday = DateUtils.getMonday(sunday);
        Date destDay = DateUtils.getSomeDay(monday, dayOfWeek - 1);
        return getDateIntervalListBetweenTwoDates(destDay, destDay);
    }

    /**
     * Expands an inclusive date range into one whole-day interval per day.
     *
     * @param firstDay first day of the range (inclusive)
     * @param lastDay last day of the range (inclusive)
     * @return one interval per day from firstDay through lastDay
     */
    public static List<DateInterval> getDateIntervalListBetweenTwoDates(Date firstDay, Date lastDay) {
        List<DateInterval> dateIntervals = new ArrayList<>();
        while (!firstDay.after(lastDay)) {
            Date beginTime = DateUtils.getStartOfDay(firstDay);
            Date endTime = DateUtils.getEndOfDay(firstDay);
            dateIntervals.add(new DateInterval(beginTime, endTime));
            firstDay = DateUtils.getSomeDay(firstDay, 1);
        }
        return dateIntervals;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.common.utils.placeholder;

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import static org.apache.dolphinscheduler.common.Constants.PARAMETER_FORMAT_DATE;
import static org.apache.dolphinscheduler.common.Constants.PARAMETER_FORMAT_TIME;
import static org.apache.dolphinscheduler.common.utils.DateUtils.format;
import static org.apache.commons.lang.time.DateUtils.addDays;


/**
 * Business time utils: derives the built-in time parameters
 * (current date, business date, datetime) for a workflow run.
 */
public class BusinessTimeUtils {

    /** Utility class: no instances. */
    private BusinessTimeUtils() {
    }

    /**
     * Gets the business time parameters for the given command type.
     *
     * <p>For {@code COMPLEMENT_DATA} the supplied run time IS the business date.
     * For every other command type the business date is the day before the run
     * time (or the day before "now" when no run time is given).</p>
     *
     * @param commandType command type
     * @param runTime run time or schedule time; may be null
     * @return map of parameter name to formatted date string
     */
    public static Map<String, String> getBusinessTime(CommandType commandType, Date runTime) {
        Date businessDate;
        switch (commandType) {
            case COMPLEMENT_DATA:
                // complement runs use the supplied schedule time as the business date
                businessDate = runTime;
                break;
            case START_PROCESS:
            case START_CURRENT_TASK_PROCESS:
            case RECOVER_TOLERANCE_FAULT_PROCESS:
            case RECOVER_SUSPENDED_PROCESS:
            case START_FAILURE_TASK_PROCESS:
            case REPEAT_RUNNING:
            case SCHEDULER:
            default:
                // if there is a scheduled time, take the day before it (recovery
                // from failed nodes, suspension recovery, re-run for scheduling);
                // otherwise fall back to the day before now
                businessDate = addDays(runTime == null ? new Date() : runTime, -1);
                break;
        }
        Date businessCurrentDate = addDays(businessDate, 1);
        Map<String, String> result = new HashMap<>();
        result.put(Constants.PARAMETER_CURRENT_DATE, format(businessCurrentDate, PARAMETER_FORMAT_DATE));
        result.put(Constants.PARAMETER_BUSINESS_DATE, format(businessDate, PARAMETER_FORMAT_DATE));
        result.put(Constants.PARAMETER_DATETIME, format(businessCurrentDate, PARAMETER_FORMAT_TIME));
        return result;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.common.utils.placeholder;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;

/**
 * Placeholder utils: replaces {@code ${name}} placeholders in a string from a
 * parameter dictionary.
 */
public class PlaceholderUtils {

    private static final Logger logger = LoggerFactory.getLogger(PlaceholderUtils.class);

    /**
     * Prefix of the position to be replaced
     */
    public static final String placeholderPrefix = "${";

    /**
     * The suffix of the position to be replaced
     */
    public static final String placeholderSuffix = "}";

    /**
     * Strict helper: an unresolvable placeholder causes an exception.
     * PropertyPlaceholderHelper is stateless after construction, so both
     * helpers are built once instead of on every call.
     */
    private static final PropertyPlaceholderHelper STRICT_HELPER = getPropertyPlaceholderHelper(false);

    /**
     * Non-strict helper: an unresolvable placeholder is left in place and
     * replacement continues with the next one.
     */
    private static final PropertyPlaceholderHelper NON_STRICT_HELPER = getPropertyPlaceholderHelper(true);

    /** Utility class: no instances. */
    private PlaceholderUtils() {
    }

    /**
     * Replaces all placeholders of format {@code ${name}} with the value returned
     * from the supplied {@link PropertyPlaceholderHelper.PlaceholderResolver}.
     *
     * @param value the value containing the placeholders to be replaced
     * @param paramsMap placeholder data dictionary
     * @param ignoreUnresolvablePlaceholders whether unresolvable placeholders are
     *        ignored ({@code true}) or cause an exception ({@code false})
     * @return the supplied value with placeholders replaced inline
     */
    public static String replacePlaceholders(String value,
                                             Map<String, String> paramsMap,
                                             boolean ignoreUnresolvablePlaceholders) {
        PropertyPlaceholderHelper helper = ignoreUnresolvablePlaceholders ? NON_STRICT_HELPER : STRICT_HELPER;
        return helper.replacePlaceholders(value, new PropertyPlaceholderResolver(value, paramsMap));
    }

    /**
     * Creates a new {@code PropertyPlaceholderHelper} that uses the standard
     * {@code ${}} prefix and suffix.
     *
     * @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should
     * be ignored ({@code true}) or cause an exception ({@code false})
     * @return a helper configured with the class-level prefix/suffix
     */
    public static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) {
        return new PropertyPlaceholderHelper(placeholderPrefix, placeholderSuffix, null, ignoreUnresolvablePlaceholders);
    }

    /**
     * Placeholder replacement resolver: looks placeholder names up in the
     * parameter map and logs (without rethrowing) any lookup failure.
     */
    private static class PropertyPlaceholderResolver implements PropertyPlaceholderHelper.PlaceholderResolver {

        /** Original input string, kept only for error reporting. */
        private final String value;

        private final Map<String, String> paramsMap;

        public PropertyPlaceholderResolver(String value, Map<String, String> paramsMap) {
            this.value = value;
            this.paramsMap = paramsMap;
        }

        @Override
        public String resolvePlaceholder(String placeholderName) {
            try {
                return paramsMap.get(placeholderName);
            } catch (Exception ex) {
                logger.error(String.format("resolve placeholder '%s' in [ %s ]", placeholderName, value), ex);
                return null;
            }
        }
    }

}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils.placeholder; + + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.util.*; + +/** + * Utility class for working with Strings that have placeholder values in them. A placeholder takes the form + * {@code ${name}}. Using {@code PropertyPlaceholderHelper} these placeholders can be substituted for + * user-supplied values.

Values for substitution can be supplied using a {@link Properties} instance or + * using a {@link PlaceholderResolver}. + * + * @author Juergen Hoeller + * @author Rob Harrop + * @since 3.0 + */ +public class PropertyPlaceholderHelper { + + private static final Log logger = LogFactory.getLog(PropertyPlaceholderHelper.class); + + private static final Map wellKnownSimplePrefixes = new HashMap(4); + + static { + wellKnownSimplePrefixes.put("}", "{"); + wellKnownSimplePrefixes.put("]", "["); + wellKnownSimplePrefixes.put(")", "("); + } + + + private final String placeholderPrefix; + + private final String placeholderSuffix; + + private final String simplePrefix; + + private final String valueSeparator; + + private final boolean ignoreUnresolvablePlaceholders; + + + /** + * Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix. + * Unresolvable placeholders are ignored. + * @param placeholderPrefix the prefix that denotes the start of a placeholder + * @param placeholderSuffix the suffix that denotes the end of a placeholder + */ + public PropertyPlaceholderHelper(String placeholderPrefix, String placeholderSuffix) { + this(placeholderPrefix, placeholderSuffix, null, true); + } + + /** + * Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix. 
+ * @param placeholderPrefix the prefix that denotes the start of a placeholder + * @param placeholderSuffix the suffix that denotes the end of a placeholder + * @param valueSeparator the separating character between the placeholder variable + * and the associated default value, if any + * @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should + * be ignored ({@code true}) or cause an exception ({@code false}) + */ + public PropertyPlaceholderHelper(String placeholderPrefix, String placeholderSuffix, + String valueSeparator, boolean ignoreUnresolvablePlaceholders) { + + notNull(placeholderPrefix, "'placeholderPrefix' must not be null"); + notNull(placeholderSuffix, "'placeholderSuffix' must not be null"); + this.placeholderPrefix = placeholderPrefix; + this.placeholderSuffix = placeholderSuffix; + String simplePrefixForSuffix = wellKnownSimplePrefixes.get(this.placeholderSuffix); + if (simplePrefixForSuffix != null && this.placeholderPrefix.endsWith(simplePrefixForSuffix)) { + this.simplePrefix = simplePrefixForSuffix; + } + else { + this.simplePrefix = this.placeholderPrefix; + } + this.valueSeparator = valueSeparator; + this.ignoreUnresolvablePlaceholders = ignoreUnresolvablePlaceholders; + } + + + /** + * Replaces all placeholders of format {@code ${name}} with the corresponding + * property from the supplied {@link Properties}. 
+ * @param value the value containing the placeholders to be replaced + * @param properties the {@code Properties} to use for replacement + * @return the supplied value with placeholders replaced inline + */ + public String replacePlaceholders(String value, final Properties properties) { + notNull(properties, "'properties' must not be null"); + return replacePlaceholders(value, new PlaceholderResolver() { + @Override + public String resolvePlaceholder(String placeholderName) { + return properties.getProperty(placeholderName); + } + }); + } + + /** + * Replaces all placeholders of format {@code ${name}} with the value returned + * from the supplied {@link PlaceholderResolver}. + * @param value the value containing the placeholders to be replaced + * @param placeholderResolver the {@code PlaceholderResolver} to use for replacement + * @return the supplied value with placeholders replaced inline + */ + public String replacePlaceholders(String value, PlaceholderResolver placeholderResolver) { + notNull(value, "'value' must not be null"); + return parseStringValue(value, placeholderResolver, new HashSet()); + } + + protected String parseStringValue( + String value, PlaceholderResolver placeholderResolver, Set visitedPlaceholders) { + + StringBuilder result = new StringBuilder(value); + + int startIndex = value.indexOf(this.placeholderPrefix); + while (startIndex != -1) { + int endIndex = findPlaceholderEndIndex(result, startIndex); + if (endIndex != -1) { + String placeholder = result.substring(startIndex + this.placeholderPrefix.length(), endIndex); + String originalPlaceholder = placeholder; + if (!visitedPlaceholders.add(originalPlaceholder)) { + throw new IllegalArgumentException( + "Circular placeholder reference '" + originalPlaceholder + "' in property definitions"); + } + // Recursive invocation, parsing placeholders contained in the placeholder key. 
+ placeholder = parseStringValue(placeholder, placeholderResolver, visitedPlaceholders); + // Now obtain the value for the fully resolved key... + String propVal = placeholderResolver.resolvePlaceholder(placeholder); + if (propVal == null && this.valueSeparator != null) { + int separatorIndex = placeholder.indexOf(this.valueSeparator); + if (separatorIndex != -1) { + String actualPlaceholder = placeholder.substring(0, separatorIndex); + String defaultValue = placeholder.substring(separatorIndex + this.valueSeparator.length()); + propVal = placeholderResolver.resolvePlaceholder(actualPlaceholder); + if (propVal == null) { + propVal = defaultValue; + } + } + } + if (propVal != null) { + // Recursive invocation, parsing placeholders contained in the + // previously resolved placeholder value. + propVal = parseStringValue(propVal, placeholderResolver, visitedPlaceholders); + result.replace(startIndex, endIndex + this.placeholderSuffix.length(), propVal); + if (logger.isTraceEnabled()) { + logger.trace("Resolved placeholder '" + placeholder + "'"); + } + startIndex = result.indexOf(this.placeholderPrefix, startIndex + propVal.length()); + } + else if (this.ignoreUnresolvablePlaceholders) { + // Proceed with unprocessed value. 
+ startIndex = result.indexOf(this.placeholderPrefix, endIndex + this.placeholderSuffix.length()); + } + else { + throw new IllegalArgumentException("Could not resolve placeholder '" + + placeholder + "'" + " in value \"" + value + "\""); + } + visitedPlaceholders.remove(originalPlaceholder); + } + else { + startIndex = -1; + } + } + + return result.toString(); + } + + private int findPlaceholderEndIndex(CharSequence buf, int startIndex) { + int index = startIndex + this.placeholderPrefix.length(); + int withinNestedPlaceholder = 0; + while (index < buf.length()) { + if (substringMatch(buf, index, this.placeholderSuffix)) { + if (withinNestedPlaceholder > 0) { + withinNestedPlaceholder--; + index = index + this.placeholderSuffix.length(); + } + else { + return index; + } + } + else if (substringMatch(buf, index, this.simplePrefix)) { + withinNestedPlaceholder++; + index = index + this.simplePrefix.length(); + } + else { + index++; + } + } + return -1; + } + + + /** + * Strategy interface used to resolve replacement values for placeholders contained in Strings. + */ + public interface PlaceholderResolver { + + /** + * Resolve the supplied placeholder name to the replacement value. + * @param placeholderName the name of the placeholder to resolve + * @return the replacement value, or {@code null} if no replacement is to be made + */ + String resolvePlaceholder(String placeholderName); + } + + /** + * Test whether the given string matches the given substring + * at the given index. 
+ * @param str the original string (or StringBuilder) + * @param index the index in the original string to start matching against + * @param substring the substring to match at the given index + */ + public static boolean substringMatch(CharSequence str, int index, CharSequence substring) { + for (int j = 0; j < substring.length(); j++) { + int i = index + j; + if (i >= str.length() || str.charAt(i) != substring.charAt(j)) { + return false; + } + } + return true; + } + + /** + * Assert that an object is not {@code null}. + *

Assert.notNull(clazz, "The class must not be null");
+ * @param object the object to check + * @param message the exception message to use if the assertion fails + * @throws IllegalArgumentException if the object is {@code null} + */ + public static void notNull(Object object, String message) { + if (object == null) { + throw new IllegalArgumentException(message); + } + } + + +} + diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java new file mode 100644 index 0000000000..510f026585 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java @@ -0,0 +1,512 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils.placeholder; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * time place holder utils + */ +public class TimePlaceholderUtils { + private static final Logger logger = LoggerFactory.getLogger(TimePlaceholderUtils.class); + + /** + * Prefix of the position to be replaced + */ + public static final String placeholderPrefix = "$["; + + /** + * The suffix of the position to be replaced + */ + public static final String placeholderSuffix = "]"; + + /** + * Replaces all placeholders of format {@code ${name}} with the value returned + * from the supplied {@link PropertyPlaceholderHelper.PlaceholderResolver}. + * + * @param value the value containing the placeholders to be replaced + * @param date custom date + * @param ignoreUnresolvablePlaceholders + * @return the supplied value with placeholders replaced inline + */ + public static String replacePlaceholders(String value, Date date, boolean ignoreUnresolvablePlaceholders) { + PropertyPlaceholderHelper strictHelper = getPropertyPlaceholderHelper(false); + PropertyPlaceholderHelper nonStrictHelper = getPropertyPlaceholderHelper(true); + + PropertyPlaceholderHelper helper = (ignoreUnresolvablePlaceholders ? nonStrictHelper : strictHelper); + return helper.replacePlaceholders(value, new TimePlaceholderResolver(value, date)); + } + + + /** + * Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix. 
+ * @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should + * be ignored ({@code true}) or cause an exception ({@code false}) + */ + private static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) { + return new PropertyPlaceholderHelper(placeholderPrefix, placeholderSuffix, null, ignoreUnresolvablePlaceholders); + } + + /** + * calculate expression's value + * + * @param expression + * @return + */ + public static Integer calculate(String expression) { + expression = StringUtils.trim(expression); + expression = convert(expression); + + List result = string2List(expression); + result = convert2SuffixList(result); + + return calculate(result); + } + + /** + * Change the sign in the expression to P (positive) N (negative) + * + * @param expression + * @return eg. "-3+-6*(+8)-(-5) -> S3+S6*(P8)-(S5)" + */ + private static String convert(String expression) { + char[] arr = expression.toCharArray(); + + for (int i = 0; i < arr.length; i++) { + if (arr[i] == SUBTRACT_CHAR) { + if (i == 0) { + arr[i] = N; + } else { + char c = arr[i - 1]; + if (c == ADD_CHAR || c == SUBTRACT_CHAR || c == MULTIPLY_CHAR || c == DIVISION_CHAR || c == LEFT_BRACE_CHAR) { + arr[i] = N; + } + } + } else if (arr[i] == ADD_CHAR) { + if (i == 0) { + arr[i] = P; + } else { + char c = arr[i - 1]; + if (c == ADD_CHAR || c == SUBTRACT_CHAR || c == MULTIPLY_CHAR || c == DIVISION_CHAR || c == LEFT_BRACE_CHAR) { + arr[i] = P; + } + } + } + } + + return new String(arr); + } + + /** + * to suffix expression + * + * @param srcList + * @return + */ + private static List convert2SuffixList(List srcList) { + List result = new ArrayList<>(); + Stack stack = new Stack<>(); + + for (int i = 0; i < srcList.size(); i++) { + if (Character.isDigit(srcList.get(i).charAt(0))) { + result.add(srcList.get(i)); + } else { + switch (srcList.get(i).charAt(0)) { + case LEFT_BRACE_CHAR: + stack.push(srcList.get(i)); + break; + case 
RIGHT_BRACE_CHAR: + while (!LEFT_BRACE_STRING.equals(stack.peek())) { + result.add(stack.pop()); + } + stack.pop(); + break; + default: + while (!stack.isEmpty() && compare(stack.peek(), srcList.get(i))) { + result.add(stack.pop()); + } + stack.push(srcList.get(i)); + break; + } + } + } + + while (!stack.isEmpty()) { + result.add(stack.pop()); + } + + return result; + } + + /** + * Calculate the suffix expression + * + * @param result + * @return + */ + private static Integer calculate(List result) { + Stack stack = new Stack<>(); + for (int i = 0; i < result.size(); i++) { + if (Character.isDigit(result.get(i).charAt(0))) { + stack.push(Integer.parseInt(result.get(i))); + } else { + Integer backInt = stack.pop(); + Integer frontInt = 0; + char op = result.get(i).charAt(0); + + if (!(op == P || op == N)) { + frontInt = stack.pop(); + } + + Integer res = 0; + switch (result.get(i).charAt(0)) { + case P: + res = frontInt + backInt; + break; + case N: + res = frontInt - backInt; + break; + case ADD_CHAR: + res = frontInt + backInt; + break; + case SUBTRACT_CHAR: + res = frontInt - backInt; + break; + case MULTIPLY_CHAR: + res = frontInt * backInt; + break; + case DIVISION_CHAR: + res = frontInt / backInt; + break; + default: + break; + } + stack.push(res); + } + } + + return stack.pop(); + } + + /** + * string to list + * + * @param expression + * @return list + */ + private static List string2List(String expression) { + List result = new ArrayList<>(); + String num = ""; + for (int i = 0; i < expression.length(); i++) { + if (Character.isDigit(expression.charAt(i))) { + num = num + expression.charAt(i); + } else { + if (!num.isEmpty()) { + result.add(num); + } + result.add(expression.charAt(i) + ""); + num = ""; + } + } + + if (!num.isEmpty()) { + result.add(num); + } + + return result; + } + + /** + * compare loginUser level + * + * @param peek + * @param cur + * @return true or false + */ + private static boolean compare(String peek, String cur) { + if 
(MULTIPLY_STRING.equals(peek) && (DIVISION_STRING.equals(cur) || MULTIPLY_STRING.equals(cur) || ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) { + return true; + } else if (DIVISION_STRING.equals(peek) && (DIVISION_STRING.equals(cur) || MULTIPLY_STRING.equals(cur) || ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) { + return true; + } else if (ADD_STRING.equals(peek) && (ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) { + return true; + } else { + return SUBTRACT_STRING.equals(peek) && (ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur)); + } + + } + + /** + * Placeholder replacement resolver + */ + private static class TimePlaceholderResolver implements + PropertyPlaceholderHelper.PlaceholderResolver { + + private final String value; + + private final Date date; + + public TimePlaceholderResolver(String value, Date date) { + this.value = value; + this.date = date; + } + + @Override + public String resolvePlaceholder(String placeholderName) { + try { + return calculateTime(placeholderName, date); + } catch (Exception ex) { + logger.error(String.format("resolve placeholder '%s' in [ %s ]" , placeholderName, value), ex); + return null; + } + } + } + + + /** + * calculate time + * + * @param date date + * @return calculate time + */ + private static String calculateTime(String expression, Date date) { + // After N years: $[add_months(yyyyMMdd,12*N)], the first N months: $[add_months(yyyyMMdd,-N)], etc + String value; + + try { + if (expression.startsWith(Constants.TIMESTAMP)) { + String timeExpression = expression.substring(Constants.TIMESTAMP.length() + 1, expression.length() - 1); + + Map.Entry entry = calcTimeExpression(timeExpression, date); + + String dateStr = DateUtils.format(entry.getKey(), entry.getValue()); + + Date timestamp = DateUtils.parse(dateStr, Constants.PARAMETER_FORMAT_TIME); + + value = String.valueOf(timestamp.getTime() / 1000); + } else { + Map.Entry entry = calcTimeExpression(expression, date); + value = 
DateUtils.format(entry.getKey(), entry.getValue()); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw e; + } + + return value; + } + + /** + * calculate time expresstion + * + * @return + */ + public static Map.Entry calcTimeExpression(String expression, Date date) { + Map.Entry resultEntry; + + if (expression.startsWith(Constants.ADD_MONTHS)) { + resultEntry = calcMonths(expression, date); + } else if (expression.startsWith(Constants.MONTH_BEGIN)) { + resultEntry = calcMonthBegin(expression, date); + } else if (expression.startsWith(Constants.MONTH_END)) { + resultEntry = calcMonthEnd(expression, date); + } else if (expression.startsWith(Constants.WEEK_BEGIN)) { + resultEntry = calcWeekStart(expression, date); + } else if (expression.startsWith(Constants.WEEK_END)) { + resultEntry = calcWeekEnd(expression, date); + } else { + resultEntry = calcMinutes(expression, date); + } + + return resultEntry; + } + + /** + * get first day of month + * + * @return + */ + public static Map.Entry calcMonthBegin(String expression, Date date) { + String addMonthExpr = expression.substring(Constants.MONTH_BEGIN.length() + 1, expression.length() - 1); + String[] params = addMonthExpr.split(Constants.COMMA); + + if (params.length == 2) { + String dateFormat = params[0]; + String dayExpr = params[1]; + Integer day = calculate(dayExpr); + Date targetDate = DateUtils.getFirstDayOfMonth(date); + targetDate = org.apache.commons.lang.time.DateUtils.addDays(targetDate, day); + + return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); + } + + throw new RuntimeException("expression not valid"); + } + + /** + * get last day of month + * + */ + public static Map.Entry calcMonthEnd(String expression, Date date) { + String addMonthExpr = expression.substring(Constants.MONTH_END.length() + 1, expression.length() - 1); + String[] params = addMonthExpr.split(Constants.COMMA); + + if (params.length == 2) { + String dateFormat = params[0]; + String dayExpr = 
params[1]; + Integer day = calculate(dayExpr); + Date targetDate = DateUtils.getLastDayOfMonth(date); + targetDate = org.apache.commons.lang.time.DateUtils.addDays(targetDate, day); + + return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); + } + + throw new RuntimeException("expression not valid"); + } + + /** + * get first day of week + * + * @return monday + */ + public static Map.Entry calcWeekStart(String expression, Date date) { + String addMonthExpr = expression.substring(Constants.WEEK_BEGIN.length() + 1, expression.length() - 1); + String[] params = addMonthExpr.split(Constants.COMMA); + + if (params.length == 2) { + String dateFormat = params[0]; + String dayExpr = params[1]; + Integer day = calculate(dayExpr); + Date targetDate = DateUtils.getMonday(date); + targetDate = org.apache.commons.lang.time.DateUtils.addDays(targetDate, day); + return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); + } + + throw new RuntimeException("expression not valid"); + } + + /** + * get last day of week + * + */ + public static Map.Entry calcWeekEnd(String expression, Date date) { + String addMonthExpr = expression.substring(Constants.WEEK_END.length() + 1, expression.length() - 1); + String[] params = addMonthExpr.split(Constants.COMMA); + + if (params.length == 2) { + String dateFormat = params[0]; + String dayExpr = params[1]; + Integer day = calculate(dayExpr); + Date targetDate = DateUtils.getSunday(date); + targetDate = org.apache.commons.lang.time.DateUtils.addDays(targetDate, day); + + return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); + } + + throw new RuntimeException("Expression not valid"); + } + + /** + * calc months expression + * + * @return + */ + public static Map.Entry calcMonths(String expression, Date date) { + String addMonthExpr = expression.substring(Constants.ADD_MONTHS.length() + 1, expression.length() - 1); + String[] params = addMonthExpr.split(Constants.COMMA); + + if (params.length == 2) { + 
String dateFormat = params[0]; + String monthExpr = params[1]; + Integer addMonth = calculate(monthExpr); + Date targetDate = org.apache.commons.lang.time.DateUtils.addMonths(date, addMonth); + + return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); + } + + throw new RuntimeException("expression not valid"); + } + + /** + * calculate time expression + * + * + * @return + */ + public static Map.Entry calcMinutes(String expression, Date date) { + if (expression.contains("+")) { + int index = expression.lastIndexOf('+'); + + if (Character.isDigit(expression.charAt(index + 1))) { + String addMinuteExpr = expression.substring(index + 1); + Date targetDate = org.apache.commons.lang.time.DateUtils + .addMinutes(date, calcMinutes(addMinuteExpr)); + String dateFormat = expression.substring(0, index); + + return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); + } + } else if (expression.contains("-")) { + int index = expression.lastIndexOf('-'); + + if (Character.isDigit(expression.charAt(index + 1))) { + String addMinuteExpr = expression.substring(index + 1); + Date targetDate = org.apache.commons.lang.time.DateUtils + .addMinutes(date, 0 - calcMinutes(addMinuteExpr)); + String dateFormat = expression.substring(0, index); + + return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); + } + + // yyyy-MM-dd/HH:mm:ss + return new AbstractMap.SimpleImmutableEntry<>(date, expression); + } + + // $[HHmmss] + return new AbstractMap.SimpleImmutableEntry<>(date, expression); + } + + /** + * calculate need minutes + * + * @param minuteExpression + * @return + */ + public static Integer calcMinutes(String minuteExpression) { + int index = minuteExpression.indexOf("/"); + + String calcExpression; + + if (index == -1) { + calcExpression = String.format("60*24*(%s)", minuteExpression); + } else { + + calcExpression = String.format("60*24*(%s)%s", minuteExpression.substring(0, index), + minuteExpression.substring(index)); + } + + return 
calculate(calcExpression); + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/AbstractZKClient.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/AbstractZKClient.java new file mode 100644 index 0000000000..3c646c57ec --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/AbstractZKClient.java @@ -0,0 +1,583 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.zk; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.IStoppable; +import org.apache.dolphinscheduler.common.enums.ZKNodeType; +import org.apache.dolphinscheduler.common.model.MasterServer; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.common.utils.ResInfo; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.commons.lang3.StringUtils; +import org.apache.curator.RetryPolicy; +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.CuratorFrameworkFactory; +import org.apache.curator.framework.imps.CuratorFrameworkState; +import org.apache.curator.framework.recipes.locks.InterProcessMutex; +import org.apache.curator.framework.state.ConnectionState; +import org.apache.curator.framework.state.ConnectionStateListener; +import org.apache.curator.retry.ExponentialBackoffRetry; +import org.apache.zookeeper.CreateMode; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.dolphinscheduler.common.Constants.*; + + +/** + * abstract zookeeper client + */ +public abstract class AbstractZKClient { + + private static final Logger logger = LoggerFactory.getLogger(AbstractZKClient.class); + + /** + * load configuration file + */ + protected static Configuration conf; + + protected CuratorFramework zkClient = null; + + /** + * server stop or not + */ + protected IStoppable stoppable = null; + + + static { + try { + conf = new PropertiesConfiguration(Constants.ZOOKEEPER_PROPERTIES_PATH); + }catch (ConfigurationException e){ + logger.error("load configuration failed : " + e.getMessage(),e); + System.exit(1); + } + } + + + public 
AbstractZKClient() { + + // retry strategy + RetryPolicy retryPolicy = new ExponentialBackoffRetry( + Integer.parseInt(conf.getString(Constants.ZOOKEEPER_RETRY_SLEEP)), + Integer.parseInt(conf.getString(Constants.ZOOKEEPER_RETRY_MAXTIME))); + + try{ + // crate zookeeper client + zkClient = CuratorFrameworkFactory.builder() + .connectString(getZookeeperQuorum()) + .retryPolicy(retryPolicy) + .sessionTimeoutMs(1000 * Integer.parseInt(conf.getString(Constants.ZOOKEEPER_SESSION_TIMEOUT))) + .connectionTimeoutMs(1000 * Integer.parseInt(conf.getString(Constants.ZOOKEEPER_CONNECTION_TIMEOUT))) + .build(); + + zkClient.start(); + initStateLister(); + + }catch(Exception e){ + logger.error("create zookeeper connect failed : " + e.getMessage(),e); + System.exit(-1); + } + } + + /** + * + * register status monitoring events for zookeeper clients + */ + public void initStateLister(){ + if(zkClient == null) { + return; + } + // add ConnectionStateListener monitoring zookeeper connection state + ConnectionStateListener csLister = new ConnectionStateListener() { + + @Override + public void stateChanged(CuratorFramework client, ConnectionState newState) { + logger.info("state changed , current state : " + newState.name()); + /** + * probably session expired + */ + if(newState == ConnectionState.LOST){ + // if lost , then exit + logger.info("current zookeepr connection state : connection lost "); + } + } + }; + + zkClient.getConnectionStateListenable().addListener(csLister); + } + + + public void start() { + zkClient.start(); + logger.info("zookeeper start ..."); + } + + public void close() { + zkClient.getZookeeperClient().close(); + zkClient.close(); + logger.info("zookeeper close ..."); + } + + + /** + * heartbeat for zookeeper + * @param znode + */ + public void heartBeatForZk(String znode, String serverType){ + try { + + //check dead or not in zookeeper + if(zkClient.getState() == CuratorFrameworkState.STOPPED || checkIsDeadServer(znode, serverType)){ + stoppable.stop("i was 
judged to death, release resources and stop myself"); + return; + } + + byte[] bytes = zkClient.getData().forPath(znode); + String resInfoStr = new String(bytes); + String[] splits = resInfoStr.split(Constants.COMMA); + if (splits.length != Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH){ + return; + } + String str = splits[0] + Constants.COMMA +splits[1] + Constants.COMMA + + OSUtils.cpuUsage() + Constants.COMMA + + OSUtils.memoryUsage() + Constants.COMMA + + splits[4] + Constants.COMMA + + DateUtils.dateToString(new Date()); + zkClient.setData().forPath(znode,str.getBytes()); + + } catch (Exception e) { + logger.error("heartbeat for zk failed : " + e.getMessage(), e); + stoppable.stop("heartbeat for zk exception, release resources and stop myself"); + } + } + + /** + * check dead server or not , if dead, stop self + * + * @param zNode node path + * @param serverType master or worker prefix + * @throws Exception + */ + protected boolean checkIsDeadServer(String zNode, String serverType) throws Exception { + //ip_sequenceno + String[] zNodesPath = zNode.split("\\/"); + String ipSeqNo = zNodesPath[zNodesPath.length - 1]; + + String type = serverType.equals(MASTER_PREFIX) ? 
MASTER_PREFIX : WORKER_PREFIX; + String deadServerPath = getDeadZNodeParentPath() + SINGLE_SLASH + type + UNDERLINE + ipSeqNo; + + if(zkClient.checkExists().forPath(zNode) == null || + zkClient.checkExists().forPath(deadServerPath) != null ){ + return true; + } + + + return false; + } + + + public void removeDeadServerByHost(String host, String serverType) throws Exception { + List deadServers = zkClient.getChildren().forPath(getDeadZNodeParentPath()); + for(String serverPath : deadServers){ + if(serverPath.startsWith(serverType+UNDERLINE+host)){ + String server = getDeadZNodeParentPath() + SINGLE_SLASH + serverPath; + zkClient.delete().forPath(server); + logger.info("{} server {} deleted from zk dead server path success" , serverType , host); + } + } + } + + + /** + * create zookeeper path according the zk node type. + * @param zkNodeType + * @return + * @throws Exception + */ + private String createZNodePath(ZKNodeType zkNodeType) throws Exception { + // specify the format of stored data in ZK nodes + String heartbeatZKInfo = ResInfo.getHeartBeatInfo(new Date()); + // create temporary sequence nodes for master znode + String parentPath = getZNodeParentPath(zkNodeType); + String serverPathPrefix = parentPath + "/" + OSUtils.getHost(); + String registerPath = zkClient.create().withMode(CreateMode.EPHEMERAL_SEQUENTIAL).forPath( + serverPathPrefix + "_", heartbeatZKInfo.getBytes()); + logger.info("register {} node {} success" , zkNodeType.toString(), registerPath); + return registerPath; + } + + /** + * register server, if server already exists, return null. 
+ * @param zkNodeType + * @return register server path in zookeeper + */ + public String registerServer(ZKNodeType zkNodeType) throws Exception { + String registerPath = null; + String host = OSUtils.getHost(); + if(checkZKNodeExists(host, zkNodeType)){ + logger.error("register failure , {} server already started on host : {}" , + zkNodeType.toString(), host); + return registerPath; + } + registerPath = createZNodePath(zkNodeType); + logger.info("register {} node {} success", zkNodeType.toString(), registerPath); + + // handle dead server + handleDeadServer(registerPath, zkNodeType, Constants.DELETE_ZK_OP); + + return registerPath; + } + + /** + * opType(add): if find dead server , then add to zk deadServerPath + * opType(delete): delete path from zk + * + * @param zNode node path + * @param zkNodeType master or worker + * @param opType delete or add + * @throws Exception + */ + public void handleDeadServer(String zNode, ZKNodeType zkNodeType, String opType) throws Exception { + //ip_sequenceno + String[] zNodesPath = zNode.split("\\/"); + String ipSeqNo = zNodesPath[zNodesPath.length - 1]; + + String type = (zkNodeType == ZKNodeType.MASTER) ? 
MASTER_PREFIX : WORKER_PREFIX; + + + //check server restart, if restart , dead server path in zk should be delete + if(opType.equals(DELETE_ZK_OP)){ + String[] ipAndSeqNo = ipSeqNo.split(UNDERLINE); + String ip = ipAndSeqNo[0]; + removeDeadServerByHost(ip, type); + + }else if(opType.equals(ADD_ZK_OP)){ + String deadServerPath = getDeadZNodeParentPath() + SINGLE_SLASH + type + UNDERLINE + ipSeqNo; + if(zkClient.checkExists().forPath(deadServerPath) == null){ + //add dead server info to zk dead server path : /dead-servers/ + + zkClient.create().forPath(deadServerPath,(type + UNDERLINE + ipSeqNo).getBytes()); + + logger.info("{} server dead , and {} added to zk dead server path success" , + zkNodeType.toString(), zNode); + } + } + + } + + + + /** + * for stop server + * @param serverStoppable + */ + public void setStoppable(IStoppable serverStoppable){ + this.stoppable = serverStoppable; + } + + /** + * get active master num + * @return + */ + public int getActiveMasterNum(){ + List childrenList = new ArrayList<>(); + try { + // read master node parent path from conf + if(zkClient.checkExists().forPath(getZNodeParentPath(ZKNodeType.MASTER)) != null){ + childrenList = zkClient.getChildren().forPath(getZNodeParentPath(ZKNodeType.MASTER)); + } + } catch (Exception e) { + if(e.getMessage().contains("java.lang.IllegalStateException: instance must be started")){ + logger.error("zookeeper service not started",e); + }else{ + logger.error(e.getMessage(),e); + } + + }finally { + return childrenList.size(); + } + } + + /** + * + * @return zookeeper quorum + */ + public static String getZookeeperQuorum(){ + StringBuilder sb = new StringBuilder(); + String[] zookeeperParamslist = conf.getStringArray(Constants.ZOOKEEPER_QUORUM); + for (String param : zookeeperParamslist) { + sb.append(param).append(Constants.COMMA); + } + + if(sb.length() > 0){ + sb.deleteCharAt(sb.length() - 1); + } + + return sb.toString(); + } + + /** + * get server list. 
+ * @param zkNodeType + * @return + */ + public List getServersList(ZKNodeType zkNodeType){ + Map masterMap = getServerMaps(zkNodeType); + String parentPath = getZNodeParentPath(zkNodeType); + + List masterServers = new ArrayList<>(); + int i = 0; + for(String path : masterMap.keySet()){ + MasterServer masterServer = ResInfo.parseHeartbeatForZKInfo(masterMap.get(path)); + masterServer.setZkDirectory( parentPath + "/"+ path); + masterServer.setId(i); + i ++; + masterServers.add(masterServer); + } + return masterServers; + } + + /** + * get master server list map. + * result : {host : resource info} + * @return + */ + public Map getServerMaps(ZKNodeType zkNodeType){ + + Map masterMap = new HashMap<>(); + try { + String path = getZNodeParentPath(zkNodeType); + List serverList = getZkClient().getChildren().forPath(path); + for(String server : serverList){ + byte[] bytes = getZkClient().getData().forPath(path + "/" + server); + masterMap.putIfAbsent(server, new String(bytes)); + } + } catch (Exception e) { + logger.error("get server list failed : " + e.getMessage(), e); + } + + return masterMap; + } + + /** + * check the zookeeper node already exists + * @param host + * @param zkNodeType + * @return + * @throws Exception + */ + public boolean checkZKNodeExists(String host, ZKNodeType zkNodeType) { + String path = getZNodeParentPath(zkNodeType); + if(StringUtils.isEmpty(path)){ + logger.error("check zk node exists error, host:{}, zk node type:{}", + host, zkNodeType.toString()); + return false; + } + Map serverMaps = getServerMaps(zkNodeType); + for(String hostKey : serverMaps.keySet()){ + if(hostKey.startsWith(host)){ + return true; + } + } + return false; + } + + /** + * get zkclient + * @return + */ + public CuratorFramework getZkClient() { + return zkClient; + } + + /** + * get worker node parent path + * @return + */ + protected String getWorkerZNodeParentPath(){return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_WORKERS);}; + + /** + * get master node parent path 
+ * @return + */ + protected String getMasterZNodeParentPath(){return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_MASTERS);} + + /** + * get master lock path + * @return + */ + public String getMasterLockPath(){ + return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_LOCK_MASTERS); + } + + /** + * get zookeeper node parent path + * @param zkNodeType + * @return + */ + public String getZNodeParentPath(ZKNodeType zkNodeType) { + String path = ""; + switch (zkNodeType){ + case MASTER: + return getMasterZNodeParentPath(); + case WORKER: + return getWorkerZNodeParentPath(); + case DEAD_SERVER: + return getDeadZNodeParentPath(); + default: + break; + } + return path; + } + + /** + * get dead server node parent path + * @return + */ + protected String getDeadZNodeParentPath(){ + return conf.getString(ZOOKEEPER_ESCHEDULER_DEAD_SERVERS); + } + + /** + * get master start up lock path + * @return + */ + public String getMasterStartUpLockPath(){ + return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS); + } + + /** + * get master failover lock path + * @return + */ + public String getMasterFailoverLockPath(){ + return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_MASTERS); + } + + /** + * get worker failover lock path + * @return + */ + public String getWorkerFailoverLockPath(){ + return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_WORKERS); + } + + /** + * release mutex + * @param mutex + */ + public static void releaseMutex(InterProcessMutex mutex) { + if (mutex != null){ + try { + mutex.release(); + } catch (Exception e) { + if(e.getMessage().equals("instance must be started before calling this method")){ + logger.warn("lock release"); + }else{ + logger.error("lock release failed : " + e.getMessage(),e); + } + + } + } + } + + /** + * init system znode + */ + protected void initSystemZNode(){ + try { + createNodePath(getMasterZNodeParentPath()); + createNodePath(getWorkerZNodeParentPath()); + 
createNodePath(getDeadZNodeParentPath()); + + } catch (Exception e) { + logger.error("init system znode failed : " + e.getMessage(),e); + } + } + + /** + * create zookeeper node path if not exists + * @param zNodeParentPath + * @throws Exception + */ + private void createNodePath(String zNodeParentPath) throws Exception { + if(null == zkClient.checkExists().forPath(zNodeParentPath)){ + zkClient.create().creatingParentContainersIfNeeded() + .withMode(CreateMode.PERSISTENT).forPath(zNodeParentPath); + } + } + + /** + * server self dead, stop all threads + * @param serverHost + * @param zkNodeType + */ + protected boolean checkServerSelfDead(String serverHost, ZKNodeType zkNodeType) { + if (serverHost.equals(OSUtils.getHost())) { + logger.error("{} server({}) of myself dead , stopping...", + zkNodeType.toString(), serverHost); + stoppable.stop(String.format(" {} server {} of myself dead , stopping...", + zkNodeType.toString(), serverHost)); + return true; + } + return false; + } + + /** + * get host ip, string format: masterParentPath/ip_000001/value + * @param path + * @return + */ + protected String getHostByEventDataPath(String path) { + int startIndex = path.lastIndexOf("/")+1; + int endIndex = path.lastIndexOf("_"); + + if(startIndex >= endIndex){ + logger.error("parse ip error"); + return ""; + } + return path.substring(startIndex, endIndex); + } + /** + * acquire zk lock + * @param zkClient + * @param zNodeLockPath + * @throws Exception + */ + public InterProcessMutex acquireZkLock(CuratorFramework zkClient,String zNodeLockPath)throws Exception{ + InterProcessMutex mutex = new InterProcessMutex(zkClient, zNodeLockPath); + mutex.acquire(); + return mutex; + } + + @Override + public String toString() { + return "AbstractZKClient{" + + "zkClient=" + zkClient + + ", deadServerZNodeParentPath='" + getZNodeParentPath(ZKNodeType.DEAD_SERVER) + '\'' + + ", masterZNodeParentPath='" + getZNodeParentPath(ZKNodeType.MASTER) + '\'' + + ", workerZNodeParentPath='" + 
getZNodeParentPath(ZKNodeType.WORKER) + '\'' + + ", stoppable=" + stoppable + + '}'; + } +} diff --git a/escheduler-common/src/main/resources/common/common.properties b/dolphinscheduler-common/src/main/resources/common/common.properties similarity index 100% rename from escheduler-common/src/main/resources/common/common.properties rename to dolphinscheduler-common/src/main/resources/common/common.properties diff --git a/escheduler-common/src/main/resources/common/hadoop/hadoop.properties b/dolphinscheduler-common/src/main/resources/common/hadoop/hadoop.properties similarity index 100% rename from escheduler-common/src/main/resources/common/hadoop/hadoop.properties rename to dolphinscheduler-common/src/main/resources/common/hadoop/hadoop.properties diff --git a/escheduler-common/src/main/resources/quartz.properties b/dolphinscheduler-common/src/main/resources/quartz.properties similarity index 100% rename from escheduler-common/src/main/resources/quartz.properties rename to dolphinscheduler-common/src/main/resources/quartz.properties diff --git a/escheduler-common/src/main/resources/zookeeper.properties b/dolphinscheduler-common/src/main/resources/zookeeper.properties similarity index 100% rename from escheduler-common/src/main/resources/zookeeper.properties rename to dolphinscheduler-common/src/main/resources/zookeeper.properties diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/graph/DAGTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/graph/DAGTest.java new file mode 100644 index 0000000000..7f3c9b424e --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/graph/DAGTest.java @@ -0,0 +1,356 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.graph; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.junit.Assert.*; + +public class DAGTest { + private DAG graph; + private static final Logger logger = LoggerFactory.getLogger(DAGTest.class); + + @Before + public void setup() { + graph = new DAG<>(); + } + + @After + public void tearDown() { + clear(); + } + + private void clear() { + graph = null; + graph = new DAG<>(); + + assertEquals(graph.getNodesCount(), 0); + } + + + private void makeGraph() { + clear(); + + // 1->2 + // 2->5 + // 3->5 + // 4->6 + // 5->6 + // 6->7 + + for (int i = 1; i <= 7; ++i) { + graph.addNode(i, "v(" + i + ")"); + } + + // 构造边 + assertTrue(graph.addEdge(1, 2)); + + assertTrue(graph.addEdge(2, 5)); + + assertTrue(graph.addEdge(3, 5)); + + assertTrue(graph.addEdge(4, 6)); + + assertTrue(graph.addEdge(5, 6)); + + assertTrue(graph.addEdge(6, 7)); + + assertEquals(graph.getNodesCount(), 7); + assertEquals(graph.getEdgesCount(), 6); + + } + + + /** + * 测试增加顶点 + */ + @Test + public void testAddNode() { + clear(); + + graph.addNode(1, "v(1)"); + graph.addNode(2, null); + graph.addNode(5, "v(5)"); + + assertEquals(graph.getNodesCount(), 3); + + assertEquals(graph.getNode(1), 
"v(1)"); + assertTrue(graph.containsNode(1)); + + assertFalse(graph.containsNode(10)); + } + + + /** + * 添加边 + */ + @Test + public void testAddEdge() { + clear(); + + assertFalse(graph.addEdge(1, 2, "edge(1 -> 2)", false)); + + graph.addNode(1, "v(1)"); + + assertTrue(graph.addEdge(1, 2, "edge(1 -> 2)",true)); + + graph.addNode(2, "v(2)"); + + assertTrue(graph.addEdge(1, 2, "edge(1 -> 2)",true)); + + assertFalse(graph.containsEdge(1, 3)); + + assertTrue(graph.containsEdge(1, 2)); + assertEquals(graph.getEdgesCount(), 1); + + } + + + /** + * 测试后续结点 + */ + @Test + public void testSubsequentNodes() { + makeGraph(); + + assertEquals(graph.getSubsequentNodes(1).size(), 1); + + } + + + /** + * 测试入度 + */ + @Test + public void testIndegree() { + makeGraph(); + + assertEquals(graph.getIndegree(1), 0); + assertEquals(graph.getIndegree(2), 1); + assertEquals(graph.getIndegree(3), 0); + assertEquals(graph.getIndegree(4), 0); + } + + + /** + * 测试起点 + */ + @Test + public void testBeginNode() { + makeGraph(); + + assertEquals(graph.getBeginNode().size(), 3); + + assertTrue(graph.getBeginNode().contains(1)); + assertTrue(graph.getBeginNode().contains(3)); + assertTrue(graph.getBeginNode().contains(4)); + } + + + /** + * 测试终点 + */ + @Test + public void testEndNode() { + makeGraph(); + + assertEquals(graph.getEndNode().size(), 1); + + assertTrue(graph.getEndNode().contains(7)); + } + + + /** + * 测试环 + */ + @Test + public void testCycle() { + clear(); + + // 构造顶点 + for (int i = 1; i <= 5; ++i) { + graph.addNode(i, "v(" + i + ")"); + } + + // 构造边, 1->2, 2->3, 3->4 + try { + graph.addEdge(1, 2); + graph.addEdge(2, 3); + graph.addEdge(3, 4); + + assertFalse(graph.hasCycle()); + } catch (Exception e) { + e.printStackTrace(); + fail(); + } + + + try { + boolean addResult = graph.addEdge(4, 1);//有环,添加失败 + + if(!addResult){//有环,添加失败 + assertTrue(true); + } + + graph.addEdge(5, 1); + + assertFalse(graph.hasCycle()); + } catch (Exception e) { + e.printStackTrace(); + fail(); + } + + // 重新清空 + 
clear(); + + // 构造顶点 + for (int i = 1; i <= 5; ++i) { + graph.addNode(i, "v(" + i +")"); + } + + // 构造边, 1->2, 2->3, 3->4 + try { + graph.addEdge(1, 2); + graph.addEdge(2, 3); + graph.addEdge(3, 4); + graph.addEdge(4, 5); + graph.addEdge(5, 2);//会失败,添加不进去,所以下一步无环 + + assertFalse(graph.hasCycle()); + } catch (Exception e) { + e.printStackTrace(); + fail(); + } + } + + + @Test + public void testTopologicalSort(){ + makeGraph(); + + try { + List topoList = new ArrayList<>();//一种拓扑结果是1 3 4 2 5 6 7 + topoList.add(1); + topoList.add(3); + topoList.add(4); + topoList.add(2); + topoList.add(5); + topoList.add(6); + topoList.add(7); + + assertEquals(graph.topologicalSort(),topoList); + } catch (Exception e) { + e.printStackTrace(); + fail(); + } + } + + + @Test + public void testTopologicalSort2() { + clear(); + + graph.addEdge(1, 2, null, true); + graph.addEdge(2, 3, null, true); + graph.addEdge(3, 4, null, true); + graph.addEdge(4, 5, null, true); + graph.addEdge(5, 1, null, false); //因环会添加失败,ERROR级别日志输出 + + try { + List topoList = new ArrayList<>();//拓扑结果是1 2 3 4 5 + topoList.add(1); + topoList.add(2); + topoList.add(3); + topoList.add(4); + topoList.add(5); + + assertEquals(graph.topologicalSort(),topoList); + + } catch (Exception e) { + e.printStackTrace(); + fail(); + } + + } + + + /** + * + */ + @Test + public void testTopologicalSort3() throws Exception { + clear(); + + // 1->2 + // 1->3 + // 2->5 + // 3->4 + // 4->6 + // 5->6 + // 6->7 + // 6->8 + + for (int i = 1; i <= 8; ++i) { + graph.addNode(i, "v(" + i + ")"); + } + + // 构造边 + assertTrue(graph.addEdge(1, 2)); + + assertTrue(graph.addEdge(1, 3)); + + assertTrue(graph.addEdge(2, 5)); + assertTrue(graph.addEdge(3, 4)); + + assertTrue(graph.addEdge(4, 6)); + + assertTrue(graph.addEdge(5, 6)); + + assertTrue(graph.addEdge(6, 7)); + assertTrue(graph.addEdge(6, 8)); + + + + + assertEquals(graph.getNodesCount(), 8); + + logger.info(Arrays.toString(graph.topologicalSort().toArray())); + + List expectedList = new 
ArrayList<>(); + + for (int i = 1; i <= 8; ++i) { + expectedList.add(i); + + logger.info(i + " subsequentNodes : " + graph.getSubsequentNodes(i)); + } + +// assertArrayEquals(expectedList.toArray(),graph.topologicalSort().toArray()); + + logger.info(6 + " previousNodesb: " + graph.getPreviousNodes(6)); + assertEquals(5, graph.getSubsequentNodes(2).toArray()[0]); + + } + +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java new file mode 100644 index 0000000000..c493f12cc0 --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java @@ -0,0 +1,146 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.os; + + +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import oshi.hardware.GlobalMemory; + +import java.math.RoundingMode; +import java.text.DecimalFormat; + + +/** + * OSUtilsTest + */ +public class OSUtilsTest { + + private static Logger logger = LoggerFactory.getLogger(OSUtilsTest.class); +// static SystemInfo si = new SystemInfo(); +// static HardwareAbstractionLayer hal = si.getHardware(); + + + @Test + public void getHost(){ + logger.info(OSUtils.getHost()); + } + + + @Test + public void memoryUsage() { + logger.info("memoryUsage : {}", OSUtils.memoryUsage());// 0.3361799418926239 +// printMemory(hal.getMemory());// 35 % + } + + @Test + public void availablePhysicalMemorySize() { + logger.info("availablePhysicalMemorySize : {}", OSUtils.availablePhysicalMemorySize()); + logger.info("availablePhysicalMemorySize : {}", OSUtils.totalMemorySize() / 10); + } + + + @Test + public void loadAverage() { + logger.info("memoryUsage : {}", OSUtils.loadAverage()); + } + + + private void printMemory(GlobalMemory memory) { + logger.info("memoryUsage : {} %" , (memory.getTotal() - memory.getAvailable()) * 100 / memory.getTotal() ); + } + + + @Test + public void cpuUsage() throws Exception { + logger.info("cpuUsage : {}", OSUtils.cpuUsage()); + Thread.sleep(1000l); + logger.info("cpuUsage : {}", OSUtils.cpuUsage()); + + double cpuUsage = OSUtils.cpuUsage(); + + DecimalFormat df = new DecimalFormat("0.00"); + + df.setRoundingMode(RoundingMode.HALF_UP); + + logger.info("cpuUsage1 : {}", df.format(cpuUsage)); + } + + +// +// @Test +// public void getUserList() { +// logger.info("getUserList : {}", OSUtils.getUserList()); +// } +// +// +// @Test +// public void getGroup() throws Exception { +// logger.info("getGroup : {}", OSUtils.getGroup()); +// logger.info("getGroup : {}", OSUtils.exeShell("groups")); +// +// +// } +// +// +// @Test 
+// public void getProcessID() { +// logger.info("getProcessID : {}", OSUtils.getProcessID()); +// } +// +// +// @Test +// public void getHost() { +// logger.info("getHost : {}", OSUtils.getHost()); +// } +// +// +// +// @Test +// public void anotherGetOsInfoTest() throws InterruptedException { +// OperatingSystemMXBean os = ManagementFactory.getPlatformMXBean(OperatingSystemMXBean.class); +// final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean(); +// +// MemoryUsage memoryUsage = memoryMXBean.getHeapMemoryUsage(); +// double usage = (double)memoryUsage.getUsed() / (double)memoryUsage.getCommitted(); +// logger.info("memory usage : {}",usage); +// +// if (os instanceof UnixOperatingSystemMXBean) { +// UnixOperatingSystemMXBean unixOs = (UnixOperatingSystemMXBean) os; +// logger.info("getMaxFileDescriptorCount : {}" ,unixOs.getMaxFileDescriptorCount()); //10240 +// logger.info("getOpenFileDescriptorCount : {}",unixOs.getOpenFileDescriptorCount()); //241 +// logger.info("getAvailableProcessors : {}",unixOs.getAvailableProcessors()); //8 +// +// logger.info("getSystemLoadAverage : {}",unixOs.getSystemLoadAverage()); //1.36083984375 +// +// logger.info("getFreePhysicalMemorySize : {}",unixOs.getFreePhysicalMemorySize()); //209768448 +// +// logger.info("getTotalPhysicalMemorySize : {}",unixOs.getTotalPhysicalMemorySize()); //17179869184 16G +// +// for(int i = 0; i < 3; i++) { +// logger.info("getSystemCpuLoad : {}", unixOs.getSystemCpuLoad()); //0.0 +// +// logger.info("getProcessCpuLoad : {}", unixOs.getProcessCpuLoad() * 10); //0.0 +// Thread.sleep(1000l); +// } +// } +// } +// + +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OshiTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OshiTest.java new file mode 100644 index 0000000000..cfc069f9bb --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OshiTest.java @@ -0,0 +1,112 @@ 
+/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.os; + + +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import oshi.SystemInfo; +import oshi.hardware.CentralProcessor; +import oshi.hardware.CentralProcessor.TickType; +import oshi.hardware.GlobalMemory; +import oshi.hardware.HardwareAbstractionLayer; +import oshi.util.FormatUtil; +import oshi.util.Util; + +import java.util.Arrays; + + +/** + * os information test + */ +public class OshiTest { + + private static Logger logger = LoggerFactory.getLogger(OshiTest.class); + + + @Test + public void test() { + + SystemInfo si = new SystemInfo(); + + HardwareAbstractionLayer hal = si.getHardware(); + + logger.info("Checking Memory..."); + printMemory(hal.getMemory()); + + + logger.info("Checking CPU..."); + printCpu(hal.getProcessor()); + + } + + + + private static void printMemory(GlobalMemory memory) { + + logger.info("memory avail:{} MB" , memory.getAvailable() / 1024 / 1024 );//memory avail:6863 MB + logger.info("memory total:{} MB" , memory.getTotal() / 1024 / 1024 );//memory total:16384 MB + } + + + private static void printCpu(CentralProcessor processor) { + logger.info(String.format("CPU 
load: %.1f%% (OS MXBean)%n", processor.getSystemCpuLoad() * 100));//CPU load: 24.9% (OS MXBean) + logger.info("CPU load averages : {}", processor.getSystemLoadAverage());//CPU load averages : 1.5234375 + + + logger.info("Uptime: " + FormatUtil.formatElapsedSecs(processor.getSystemUptime())); + logger.info("Context Switches/Interrupts: " + processor.getContextSwitches() + " / " + processor.getInterrupts()); + + + long[] prevTicks = processor.getSystemCpuLoadTicks(); + logger.info("CPU, IOWait, and IRQ ticks @ 0 sec:" + Arrays.toString(prevTicks)); + //Wait a second... + Util.sleep(1000); + long[] ticks = processor.getSystemCpuLoadTicks(); + logger.info("CPU, IOWait, and IRQ ticks @ 1 sec:" + Arrays.toString(ticks)); + long user = ticks[TickType.USER.getIndex()] - prevTicks[TickType.USER.getIndex()]; + long nice = ticks[TickType.NICE.getIndex()] - prevTicks[TickType.NICE.getIndex()]; + long sys = ticks[TickType.SYSTEM.getIndex()] - prevTicks[TickType.SYSTEM.getIndex()]; + long idle = ticks[TickType.IDLE.getIndex()] - prevTicks[TickType.IDLE.getIndex()]; + long iowait = ticks[TickType.IOWAIT.getIndex()] - prevTicks[TickType.IOWAIT.getIndex()]; + long irq = ticks[TickType.IRQ.getIndex()] - prevTicks[TickType.IRQ.getIndex()]; + long softirq = ticks[TickType.SOFTIRQ.getIndex()] - prevTicks[TickType.SOFTIRQ.getIndex()]; + long steal = ticks[TickType.STEAL.getIndex()] - prevTicks[TickType.STEAL.getIndex()]; + long totalCpu = user + nice + sys + idle + iowait + irq + softirq + steal; + + logger.info(String.format( + "User: %.1f%% Nice: %.1f%% System: %.1f%% Idle: %.1f%% IOwait: %.1f%% IRQ: %.1f%% SoftIRQ: %.1f%% Steal: %.1f%%%n", + 100d * user / totalCpu, 100d * nice / totalCpu, 100d * sys / totalCpu, 100d * idle / totalCpu, + 100d * iowait / totalCpu, 100d * irq / totalCpu, 100d * softirq / totalCpu, 100d * steal / totalCpu)); + logger.info(String.format("CPU load: %.1f%% (counting ticks)%n", processor.getSystemCpuLoadBetweenTicks() * 100)); + + + + double[] loadAverage = 
processor.getSystemLoadAverage(3); + logger.info("CPU load averages:" + (loadAverage[0] < 0 ? " N/A" : String.format(" %.2f", loadAverage[0])) + + (loadAverage[1] < 0 ? " N/A" : String.format(" %.2f", loadAverage[1])) + + (loadAverage[2] < 0 ? " N/A" : String.format(" %.2f", loadAverage[2]))); + // per core CPU + StringBuilder procCpu = new StringBuilder("CPU load per processor:"); + double[] load = processor.getProcessorCpuLoadBetweenTicks(); + for (double avg : load) { + procCpu.append(String.format(" %.1f%%", avg * 100)); + } + logger.info(procCpu.toString()); + } +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/queue/TaskQueueImplTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/queue/TaskQueueImplTest.java new file mode 100644 index 0000000000..61d689fa4e --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/queue/TaskQueueImplTest.java @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.queue; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.IpUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.common.zk.StandaloneZKServerForTest; +import org.junit.After; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Random; + +import static org.junit.Assert.assertEquals; + +/** + * task queue test + */ +public class TaskQueueImplTest extends StandaloneZKServerForTest { + + private static final Logger logger = LoggerFactory.getLogger(TaskQueueImplTest.class); + + ITaskQueue tasksQueue = null; + + @Before + public void before(){ + super.before(); + + tasksQueue = TaskQueueFactory.getTaskQueueInstance(); + + //clear all data + tasksQueue.delete(); + + } + + + @After + public void after(){ + //clear all data + tasksQueue.delete(); + } + + + @Test + public void testAdd(){ + + + //add + tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE,"1_0_1_1_-1"); + tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE,"0_1_1_1_-1"); + tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE,"0_0_0_1_" + IpUtils.ipToLong(OSUtils.getHost())); + tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE,"1_2_1_1_" + IpUtils.ipToLong(OSUtils.getHost()) + 10); + + List tasks = tasksQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, 1); + + if(tasks.size() <= 0){ + return; + } + + //pop + String node1 = tasks.get(0); + + assertEquals(node1,"0_0_0_1_" + IpUtils.ipToLong(OSUtils.getHost())); + + + } + + + + /** + * test one million data from zookeeper queue + */ + @Ignore + @Test + public void extremeTest(){ + int total = 30 * 10000; + + for(int i = 0; i < total; i++) + { + for(int j = 0; j < total; j++) { + //${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} + //format 
${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} + String formatTask = String.format("%s_%d_%s_%d", i, i + 1, j, j == 0 ? 0 : j + new Random().nextInt(100)); + tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE, formatTask); + } + } + + String node1 = tasksQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, 1).get(0); + assertEquals(node1,"0"); + + } + +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/shell/ShellExecutorTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/shell/ShellExecutorTest.java new file mode 100644 index 0000000000..ac1222f3e6 --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/shell/ShellExecutorTest.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.shell; + +import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.concurrent.CountDownLatch; + +public class ShellExecutorTest { + private static final Logger logger = LoggerFactory.getLogger(ShellExecutorTest.class); + + @Test + public void execCommand() throws InterruptedException { + + ThreadPoolExecutors executors = ThreadPoolExecutors.getInstance(); + CountDownLatch latch = new CountDownLatch(200); + + executors.execute(new Runnable() { + @Override + public void run() { + + try { + int i =0; + while(i++ <= 100){ + String res = ShellExecutor.execCommand("groups"); + logger.info("time:" + i + ",thread id:" + Thread.currentThread().getId() + ", result:" + res.substring(0,5)); + Thread.sleep(100l); + latch.countDown(); + } + + } catch (IOException | InterruptedException e) { + e.printStackTrace(); + } + } + }); + + executors.execute(new Runnable() { + @Override + public void run() { + + try { + int i =0; + while(i++ <= 100){ + String res = ShellExecutor.execCommand("whoami"); + logger.info("time:" + i + ",thread id:" + Thread.currentThread().getId() + ", result2:" + res); + Thread.sleep(100l); + latch.countDown(); + } + + } catch (IOException | InterruptedException e) { + e.printStackTrace(); + } + } + }); + + latch.await(); + } +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java new file mode 100644 index 0000000000..582a29da2a --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.threadutils; + +import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ThreadPoolExecutorsTest { + + private static final Logger logger = LoggerFactory.getLogger(ThreadPoolExecutors.class); + + + @Test + public void testThreadPoolExecutors() throws InterruptedException { + + Thread2[] threadArr = new Thread2[10]; + for (int i = 0; i < threadArr.length; i++) { + + threadArr[i] = new Thread2(); + threadArr[i].setDaemon(false); + threadArr[i].start(); + } + + Thread.currentThread().join(40000l); + } + + + //test thread + class Thread2 extends Thread { + @Override + public void run() { + logger.info(String.format("ThreadPoolExecutors instance's hashcode is: %s ",ThreadPoolExecutors.getInstance("a",2).hashCode())); + } + } + + +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java new file mode 100644 index 0000000000..394cf5f64a --- /dev/null +++ 
b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + + +public class CollectionUtilsTest { + + @Test + public void equalLists() { + List a = new ArrayList(); + + a.add(1); + a.add(2); + a.add(3); + + List b = new ArrayList(); + b.add(3); + b.add(2); + b.add(1); + + Assert.assertTrue(CollectionUtils.equalLists(a,b)); + + } + + @Test + public void subtract() { + Set a = new HashSet(); + + a.add(1); + a.add(2); + a.add(3); + + Set b = new HashSet(); + b.add(0); + b.add(2); + b.add(4); + + + Assert.assertArrayEquals(new Integer[]{1,3},CollectionUtils.subtract(a,b).toArray()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java new file mode 100644 index 0000000000..f38b9b4c3b --- /dev/null +++ 
b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.InetAddress; +import java.net.UnknownHostException; + +/** + * configuration test + */ +public class CommonUtilsTest { + private static final Logger logger = LoggerFactory.getLogger(CommonUtilsTest.class); + @Test + public void getHdfsDataBasePath() { + logger.info(HadoopUtils.getHdfsDataBasePath()); + } + + @Test + public void getDownloadFilename() { + logger.info(FileUtils.getDownloadFilename("a.txt")); + } + + @Test + public void getUploadFilename() { + logger.info(FileUtils.getUploadFilename("1234", "a.txt")); + } + + @Test + public void getHdfsDir() { + logger.info(HadoopUtils.getHdfsResDir("1234")); + } + + @Test + public void test(){ + InetAddress IP = null; + try { + IP = InetAddress.getLocalHost(); + logger.info(IP.getHostAddress()); + } catch (UnknownHostException e) { + e.printStackTrace(); + } + } +} \ No newline at end of file diff --git 
a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DateUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DateUtilsTest.java new file mode 100644 index 0000000000..bcaa391042 --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DateUtilsTest.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import org.junit.Assert; +import org.junit.Test; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; + +public class DateUtilsTest { + + @Test + public void format2Readable() throws ParseException { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + + String start = "2015-12-21 18:00:36"; + Date startDate = sdf.parse(start); + + String end = "2015-12-23 03:23:44"; + Date endDate = sdf.parse(end); + + String readableDate = DateUtils.format2Readable(endDate.getTime() - startDate.getTime()); + + Assert.assertEquals("01 09:23:08", readableDate); + } + + + @Test + public void testWeek(){ + + Date curr = DateUtils.stringToDate("2019-02-01 00:00:00"); + Date monday1 = DateUtils.stringToDate("2019-01-28 00:00:00"); + Date sunday1 = DateUtils.stringToDate("2019-02-03 00:00:00"); + Date monday = DateUtils.getMonday(curr); + Date sunday = DateUtils.getSunday(monday); + + Assert.assertEquals(monday, monday1); + Assert.assertEquals(sunday, sunday1); + + } +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java new file mode 100644 index 0000000000..7311dcf5b6 --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.DependentRelation; +import org.apache.dolphinscheduler.common.model.DateInterval; +import org.apache.dolphinscheduler.common.shell.ShellExecutorTest; +import org.apache.dolphinscheduler.common.utils.dependent.DependentDateUtils; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +public class DependentUtilsTest { + private static final Logger logger = LoggerFactory.getLogger(ShellExecutorTest.class); + @Test + public void getDependResultForRelation() { + + DependentRelation dependentRelation = DependentRelation.AND; + List dependResultList = new ArrayList<>(); + dependResultList.add(DependResult.FAILED); + dependResultList.add(DependResult.SUCCESS); + DependResult result = DependentUtils.getDependResultForRelation( dependentRelation, dependResultList); + Assert.assertEquals(result, DependResult.FAILED); + + dependentRelation = DependentRelation.OR; + + Assert.assertEquals(DependentUtils.getDependResultForRelation( dependentRelation, dependResultList), + DependResult.SUCCESS); + } + + @Test + public void getDateIntervalList() { + + Date curDay = DateUtils.stringToDate("2019-02-05 00:00:00"); + + DateInterval diCur = new DateInterval(DateUtils.getStartOfDay(curDay), + DateUtils.getEndOfDay(curDay)); + + Date day1 = DateUtils.stringToDate("2019-02-04 
00:00:00"); + DateInterval di1 = new DateInterval(DateUtils.getStartOfDay(day1), + DateUtils.getEndOfDay(day1)); + Date day2 = DateUtils.stringToDate("2019-02-03 00:00:00"); + DateInterval di2 = new DateInterval(DateUtils.getStartOfDay(day2), + DateUtils.getEndOfDay(day2)); + String dateValue = "last1Days"; + List dateIntervals = DependentUtils.getDateIntervalList(curDay, dateValue); + Assert.assertEquals(dateIntervals.get(0), di1); + + dateValue = "last2Days"; + dateIntervals = DependentUtils.getDateIntervalList(curDay, dateValue); + for(DateInterval dateInterval : dateIntervals){ + logger.info(dateInterval.getStartTime().toString() + " == " + dateInterval.getEndTime().toString()); + } + + Assert.assertEquals(dateIntervals.get(1), di1); + Assert.assertEquals(dateIntervals.get(0), di2); + + dateValue = "today"; + dateIntervals = DependentUtils.getDateIntervalList(curDay, dateValue); + Assert.assertEquals(dateIntervals.get(0), diCur); + + + dateValue = "thisWeek"; + Date firstWeekDay = DateUtils.getMonday(curDay); + dateIntervals = DependentUtils.getDateIntervalList(curDay, dateValue); + + DateInterval weekHead = new DateInterval(DateUtils.getStartOfDay(firstWeekDay), DateUtils.getEndOfDay(firstWeekDay)); + DateInterval weekThis = new DateInterval(DateUtils.getStartOfDay(curDay), DateUtils.getEndOfDay(curDay)); + + Assert.assertEquals(dateIntervals.get(0), weekHead); + Assert.assertEquals(dateIntervals.get(dateIntervals.size() - 1), weekThis); + + + dateValue = "thisMonth"; + Date firstMonthDay = DateUtils.getFirstDayOfMonth(curDay); + dateIntervals = DependentUtils.getDateIntervalList(curDay, dateValue); + + DateInterval monthHead = new DateInterval(DateUtils.getStartOfDay(firstMonthDay), DateUtils.getEndOfDay(firstMonthDay)); + DateInterval monthThis = new DateInterval(DateUtils.getStartOfDay(curDay), DateUtils.getEndOfDay(curDay)); + + Assert.assertEquals(dateIntervals.get(0), monthHead); + Assert.assertEquals(dateIntervals.get(dateIntervals.size() - 1), 
monthThis); + + } + + @Test + public void testWeek(){ + + Date curDay = DateUtils.stringToDate("2019-02-05 00:00:00"); + Date day1 = DateUtils.stringToDate("2019-01-28 00:00:00"); + DateInterval di1 = new DateInterval(DateUtils.getStartOfDay(day1), + DateUtils.getEndOfDay(day1)); + + Date day2 = DateUtils.stringToDate("2019-01-29 00:00:00"); + DateInterval di2 = new DateInterval(DateUtils.getStartOfDay(day2), + DateUtils.getEndOfDay(day2)); + Date day3 = DateUtils.stringToDate("2019-01-30 00:00:00"); + DateInterval di3 = new DateInterval(DateUtils.getStartOfDay(day3), + DateUtils.getEndOfDay(day3)); + Date day4 = DateUtils.stringToDate("2019-01-31 00:00:00"); + DateInterval di4 = new DateInterval(DateUtils.getStartOfDay(day4), + DateUtils.getEndOfDay(day4)); + Date day5 = DateUtils.stringToDate("2019-02-01 00:00:00"); + DateInterval di5 = new DateInterval(DateUtils.getStartOfDay(day5), + DateUtils.getEndOfDay(day5)); + Date day6 = DateUtils.stringToDate("2019-02-02 00:00:00"); + DateInterval di6 = new DateInterval(DateUtils.getStartOfDay(day6), + DateUtils.getEndOfDay(day6)); + Date day7 = DateUtils.stringToDate("2019-02-03 00:00:00"); + DateInterval di7 = new DateInterval(DateUtils.getStartOfDay(day7), + DateUtils.getEndOfDay(day7)); + List dateIntervals = DependentDateUtils.getLastWeekInterval(curDay); + Assert.assertEquals(dateIntervals.size(), 7); + Assert.assertEquals(dateIntervals.get(0), di1); + Assert.assertEquals(dateIntervals.get(1), di2); + Assert.assertEquals(dateIntervals.get(2), di3); + Assert.assertEquals(dateIntervals.get(3), di4); + + List monday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 1); + Assert.assertEquals(monday.get(0), di1); + List tuesday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 2); + Assert.assertEquals(tuesday.get(0), di2); + List wednesday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 3); + Assert.assertEquals(wednesday.get(0), di3); + List thursday = 
DependentDateUtils.getLastWeekOneDayInterval(curDay, 4); + Assert.assertEquals(thursday.get(0), di4); + List friday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 5); + Assert.assertEquals(friday.get(0), di5); + List saturday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 6); + Assert.assertEquals(saturday.get(0), di6); + List sunday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 7); + Assert.assertEquals(sunday.get(0), di7); + } + + @Test + public void testHour(){ + + Date curDay = DateUtils.stringToDate("2019-02-05 12:10:00"); + Date day1 = DateUtils.stringToDate("2019-02-05 11:00:00"); + DateInterval di1 = new DateInterval(DateUtils.getStartOfHour(day1), + DateUtils.getEndOfHour(day1)); + Date day2 = DateUtils.stringToDate("2019-02-05 10:00:00"); + DateInterval di2 = new DateInterval(DateUtils.getStartOfHour(day2), + DateUtils.getEndOfHour(day2)); + Date day3 = DateUtils.stringToDate("2019-02-05 09:00:00"); + DateInterval di3 = new DateInterval(DateUtils.getStartOfHour(day3), + DateUtils.getEndOfHour(day3)); + + List dateIntervals = DependentDateUtils.getLastHoursInterval(curDay, 1); + Assert.assertEquals(dateIntervals.get(0), di1); + dateIntervals = DependentDateUtils.getLastHoursInterval(curDay, 2); + Assert.assertEquals(dateIntervals.get(1), di1); + Assert.assertEquals(dateIntervals.get(0), di2); + dateIntervals = DependentDateUtils.getLastHoursInterval(curDay, 3); + Assert.assertEquals(dateIntervals.get(2), di1); + Assert.assertEquals(dateIntervals.get(1), di2); + Assert.assertEquals(dateIntervals.get(0), di3); + + } + + + @Test + public void testMonth(){ + Date curDay = DateUtils.stringToDate("2019-02-05 00:00:00"); + Date day1 = DateUtils.stringToDate("2019-01-01 00:00:00"); + DateInterval di1 = new DateInterval(DateUtils.getStartOfDay(day1), + DateUtils.getEndOfDay(day1)); + + Date day2 = DateUtils.stringToDate("2019-01-31 00:00:00"); + DateInterval di2 = new DateInterval(DateUtils.getStartOfDay(day2), + 
DateUtils.getEndOfDay(day2)); + + List dateIntervals = DependentDateUtils.getLastMonthInterval(curDay); + + Assert.assertEquals(dateIntervals.size(), 31); + Assert.assertEquals(dateIntervals.get(0), di1); + Assert.assertEquals(dateIntervals.get(30), di2); + } + +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java new file mode 100644 index 0000000000..93faf465ad --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import org.junit.Assert; +import org.junit.Test; + +public class FileUtilsTest { + + @Test + public void suffix() { + Assert.assertEquals(FileUtils.suffix("ninfor.java"),"java"); + } +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java new file mode 100644 index 0000000000..6ca6360bf8 --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.List; + +@Ignore +public class HadoopUtilsTest { + + private static final Logger logger = LoggerFactory.getLogger(HadoopUtilsTest.class); + + @Test + public void getActiveRMTest() { + logger.info(HadoopUtils.getAppAddress("http://ark1:8088/ws/v1/cluster/apps/%s","192.168.xx.xx,192.168.xx.xx")); + } + + @Test + public void getApplicationStatusAddressTest(){ + logger.info(HadoopUtils.getInstance().getApplicationUrl("application_1548381297012_0030")); + } + + @Test + public void test() throws IOException { + HadoopUtils.getInstance().copyLocalToHdfs("/root/teamviewer_13.1.8286.x86_64.rpm", "/journey", true, true); + } + + @Test + public void readFileTest(){ + try { + byte[] bytes = HadoopUtils.getInstance().catFile("/escheduler/hdfs/resources/35435.sh"); + logger.info("------------------start"); + logger.info(new String(bytes)); + logger.info("---------------------end"); + } catch (Exception e) { + + } + } + @Test + public void testCapacity(){ + + } + @Test + public void testMove(){ + HadoopUtils instance = HadoopUtils.getInstance(); + try { + instance.copy("/opt/apptest/test.dat","/opt/apptest/test.dat.back",true,true); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + + } + + @Test + public void getApplicationStatus() { + logger.info(HadoopUtils.getInstance().getApplicationStatus("application_1542010131334_0029").toString()); + } + + @Test + public void getApplicationUrl(){ + String application_1516778421218_0042 = HadoopUtils.getInstance().getApplicationUrl("application_1529051418016_0167"); + logger.info(application_1516778421218_0042); + } + + @Test + public void catFileTest()throws Exception{ + List stringList = HadoopUtils.getInstance().catFile("/escheduler/hdfs/resources/WCSparkPython.py", 0, 1000); + 
logger.info(String.join(",",stringList)); + } +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java new file mode 100644 index 0000000000..9a6e366f21 --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * HttpClient utils test + */ +public class HttpUtilsTest { + + + public static final Logger logger = LoggerFactory.getLogger(HttpUtilsTest.class); + + + @Ignore + @Test + public void getTest(){ + + String result = HttpUtils.get("http://192.168.xx.xx:8088/ws/v1/cluster/info"); + logger.info(result); + + + JSONObject jsonObject = JSON.parseObject(result); + String string = jsonObject.getJSONObject("clusterInfo").getString("haState"); + logger.info(string); + } +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java new file mode 100644 index 0000000000..6f5f72631d --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java @@ -0,0 +1,39 @@ +package org.apache.dolphinscheduler.common.utils; + +import org.junit.Assert; +import org.junit.Test; + +public class IpUtilsTest { + + @Test + public void ipToLong() { + + String ip = "192.168.110.1"; + String ip2 = "0.0.0.0"; + long longNumber = IpUtils.ipToLong(ip); + long longNumber2 = IpUtils.ipToLong(ip2); + System.out.println(longNumber); + Assert.assertEquals(longNumber, 3232263681L); + Assert.assertEquals(longNumber2, 0L); + + String ip3 = "255.255.255.255"; + long longNumber3 = IpUtils.ipToLong(ip3); + System.out.println(longNumber3); + Assert.assertEquals(longNumber3, 4294967295L); + + } + + @Test + public void longToIp() { + + String ip = "192.168.110.1"; + String ip2 = "0.0.0.0"; + long longNum = 3232263681L; + String i1 = IpUtils.longToIp(longNum); + + String i2 = IpUtils.longToIp(0); + + Assert.assertEquals(ip, i1); + Assert.assertEquals(ip2, i2); + } +} \ No 
newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java new file mode 100644 index 0000000000..a7af13191d --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java @@ -0,0 +1,158 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.enums.DataType; +import org.apache.dolphinscheduler.common.enums.Direct; +import org.apache.dolphinscheduler.common.process.Property; +import com.alibaba.fastjson.JSONObject; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +public class JSONUtilsTest { + + + @Test + public void toMap() { + + String jsonStr = "{\"id\":\"1001\",\"name\":\"Jobs\"}"; + + Map models = JSONUtils.toMap(jsonStr); + Assert.assertEquals(models.get("id"), "1001"); + Assert.assertEquals(models.get("name"), "Jobs"); + + } + + @Test + public void convert2Property(){ + Property property = new Property(); + property.setProp("ds"); + property.setDirect(Direct.IN); + property.setType(DataType.VARCHAR); + property.setValue("sssssss"); + String str = "{\"direct\":\"IN\",\"prop\":\"ds\",\"type\":\"VARCHAR\",\"value\":\"sssssss\"}"; + Property property1 = JSONObject.parseObject(str, Property.class); + Direct direct = property1.getDirect(); + Assert.assertEquals(direct , Direct.IN); + } + + + @Test + public void String2MapTest(){ + String str = list2String(); + + List maps = JSONUtils.toList(str, + LinkedHashMap.class); + + Assert.assertEquals(maps.size(), 1); + Assert.assertEquals(maps.get(0).get("mysql服务名称"), "mysql200"); + Assert.assertEquals(maps.get(0).get("mysql地址"), "192.168.xx.xx"); + Assert.assertEquals(maps.get(0).get("端口"), "3306"); + Assert.assertEquals(maps.get(0).get("期间内没有使用索引的查询数握"), "80"); + Assert.assertEquals(maps.get(0).get("数据库客户端连接数"), "190"); + } + + public String list2String(){ + + LinkedHashMap map1 = new LinkedHashMap<>(); + map1.put("mysql服务名称","mysql200"); + map1.put("mysql地址","192.168.xx.xx"); + map1.put("端口","3306"); + 
map1.put("期间内没有使用索引的查询数握","80"); + map1.put("数据库客户端连接数","190"); + + List> maps = new ArrayList<>(); + maps.add(0,map1); + String resultJson = JSONUtils.toJson(maps); + return resultJson; + } + + @Test + public void testToJson() { + Map map = new HashMap<>(); + map.put("foo","bar"); + + Assert.assertEquals("{\"foo\":\"bar\"}", JSONUtils.toJson(map)); + Assert.assertEquals( + String.valueOf((Object) null), JSONUtils.toJson(null)); + } + + @Test + public void testParseObject() { + Assert.assertEquals("{\"foo\":\"bar\"}", JSONUtils.parseObject( + "{\n" + "\"foo\": \"bar\",\n" + "}", String.class)); + + Assert.assertNull(JSONUtils.parseObject("", null)); + Assert.assertNull(JSONUtils.parseObject("foo", String.class)); + } + + @Test + public void testToList() { + Assert.assertEquals(new ArrayList(), + JSONUtils.toList("A1B2C3", null)); + Assert.assertEquals(new ArrayList(), + JSONUtils.toList("", null)); + } + + @Test + public void testCheckJsonVaild() { + Assert.assertTrue(JSONUtils.checkJsonVaild("3")); + Assert.assertFalse(JSONUtils.checkJsonVaild("")); + } + + @Test + public void testFindValue() { + Assert.assertNull(JSONUtils.findValue( + new ArrayNode(new JsonNodeFactory(true)), null)); + } + + @Test + public void testToMap() { + Map map = new HashMap<>(); + map.put("foo","bar"); + + Assert.assertTrue(map.equals(JSONUtils.toMap( + "{\n" + "\"foo\": \"bar\",\n" + "}"))); + + Assert.assertFalse(map.equals(JSONUtils.toMap( + "{\n" + "\"bar\": \"foo\",\n" + "}"))); + + Assert.assertNull(JSONUtils.toMap("3")); + Assert.assertNull(JSONUtils.toMap(null)); + Assert.assertNull(JSONUtils.toMap("3", null, null)); + Assert.assertNull(JSONUtils.toMap(null, null, null)); + } + + @Test + public void testToJsonString() { + Map map = new HashMap<>(); + map.put("foo", "bar"); + + Assert.assertEquals("{\"foo\":\"bar\"}", + JSONUtils.toJsonString(map)); + Assert.assertEquals(String.valueOf((Object) null), + JSONUtils.toJsonString(null)); + } +} diff --git 
a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PropertyUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PropertyUtilsTest.java new file mode 100644 index 0000000000..2b9b0b54fd --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PropertyUtilsTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.junit.Test; + +import static org.junit.Assert.assertNotNull; + +public class PropertyUtilsTest { + + @Test + public void getString() { + assertNotNull(PropertyUtils.getString(Constants.FS_DEFAULTFS)); + assertNotNull(PropertyUtils.getInt("spring.redis.port")); + } +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringTest.java new file mode 100644 index 0000000000..99a2cf05bc --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringTest.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +public class StringTest { + + + @Test + public void test1(){ + System.out.println(String.format("%s_%010d_%010d", String.valueOf(1), Long.valueOf(3), Integer.valueOf(4))); + } + + @Test + public void stringCompareTest(){ + + for(int j = 0; j < 5; j++) { + long start = System.currentTimeMillis(); + int size = 10000; + + List taskList = new ArrayList<>(size); + + //init + for (int i = 0; i < size; i++) { + taskList.add(String.format("%d_%010d_%010d", 1, i, i + 1)); + } + + String origin = taskList.get(0); + for (int i = 1; i < taskList.size(); i++) { + String str = taskList.get(i); + int result = str.compareTo(origin); + if (result < 0) { + origin = str; + } + } + double during = (System.currentTimeMillis() - start) / 1000.0; + System.out.println(during); + Assert.assertEquals("1_0000000000_0000000001", origin); + } + } +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java new file mode 100644 index 0000000000..ee0a8aafe3 --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils.placeholder; + +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.util.Date; + +public class TimePlaceholderUtilsTest { + + Date date = null; + + @Before + public void init(){ + date = DateUtils.parse("20170101010101","yyyyMMddHHmmss"); + } + + @Test + public void replacePlaceholdersT() { + Assert.assertEquals("2017test12017:***2016-12-31,20170102,20170130,20161227,20161231", TimePlaceholderUtils.replacePlaceholders("$[yyyy]test1$[yyyy:***]$[yyyy-MM-dd-1],$[month_begin(yyyyMMdd, 1)],$[month_end(yyyyMMdd, -1)],$[week_begin(yyyyMMdd, 1)],$[week_end(yyyyMMdd, -1)]", + date, true)); + + Assert.assertEquals("1483200061,1483290061,1485709261,1482771661,1483113600,1483203661", TimePlaceholderUtils.replacePlaceholders("$[timestamp(yyyyMMdd00mmss)]," + + "$[timestamp(month_begin(yyyyMMddHHmmss, 1))]," + + "$[timestamp(month_end(yyyyMMddHHmmss, -1))]," + + "$[timestamp(week_begin(yyyyMMddHHmmss, 1))]," + + "$[timestamp(week_end(yyyyMMdd000000, -1))]," + + "$[timestamp(yyyyMMddHHmmss)]", + date, true)); + } + + + + @Test + public void calcMinutesT() { + Assert.assertEquals("Sun Jan 01 01:01:01 CST 2017=yyyy", TimePlaceholderUtils.calcMinutes("yyyy", date).toString()); + Assert.assertEquals("Sun Jan 08 01:01:01 CST 2017=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd+7*1", date).toString()); + Assert.assertEquals("Sun Dec 25 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd-7*1", 
date).toString()); + Assert.assertEquals("Mon Jan 02 01:01:01 CST 2017=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd+1", date).toString()); + Assert.assertEquals("Sat Dec 31 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd-1", date).toString()); + Assert.assertEquals("Sun Jan 01 02:01:01 CST 2017=yyyyMMddHH", TimePlaceholderUtils.calcMinutes("yyyyMMddHH+1/24", date).toString()); + Assert.assertEquals("Sun Jan 01 00:01:01 CST 2017=yyyyMMddHH", TimePlaceholderUtils.calcMinutes("yyyyMMddHH-1/24", date).toString()); + } + + @Test + public void calcMonthsT() { + Assert.assertEquals("Mon Jan 01 01:01:01 CST 2018=yyyyMMdd", TimePlaceholderUtils.calcMonths("add_months(yyyyMMdd,12*1)", date).toString()); + Assert.assertEquals("Fri Jan 01 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMonths("add_months(yyyyMMdd,-12*1)", date).toString()); + } + +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/StandaloneZKServerForTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/StandaloneZKServerForTest.java new file mode 100644 index 0000000000..2a76244f8b --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/StandaloneZKServerForTest.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.common.zk;

import java.io.File;
import java.util.Properties;

import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors;
import org.apache.zookeeper.server.ServerConfig;
import org.apache.zookeeper.server.ZooKeeperServerMain;
import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
import org.junit.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/**
 * Embedded standalone ZooKeeper server, for tests only.
 *
 * The server is started at most once per JVM on a background thread; data is
 * kept under the system temp directory.
 */
public class StandaloneZKServerForTest {

    private static final Logger logger = LoggerFactory.getLogger(StandaloneZKServerForTest.class);

    /** Singleton server instance, guarded by double-checked locking on the class. */
    private static volatile ZooKeeperServerMain zkServer = null;


    @Before
    public void before() {
        logger.info("standalone zookeeper server for test service start ");

        // run on a pooled thread: ZooKeeperServerMain#runFromConfig blocks
        // until the server shuts down
        ThreadPoolExecutors.getInstance().execute(() -> {
            // data dir under java.io.tmpdir; it is NOT wiped between runs,
            // so stale state may survive — TODO confirm this is intended
            File zkFile = new File(System.getProperty("java.io.tmpdir"), "zookeeper");
            startStandaloneServer("2000", zkFile.getAbsolutePath(), "2181", "10", "5");
        });
    }


    /**
     * Start the standalone zk server (at most once per JVM).
     *
     * @param tickTime   zookeeper tick time in milliseconds
     * @param dataDir    zookeeper data directory
     * @param clientPort zookeeper client port
     * @param initLimit  zookeeper init limit, in ticks
     * @param syncLimit  zookeeper sync limit, in ticks
     */
    private void startStandaloneServer(String tickTime, String dataDir, String clientPort, String initLimit, String syncLimit) {
        Properties props = new Properties();
        props.setProperty("tickTime", tickTime);
        props.setProperty("dataDir", dataDir);
        props.setProperty("clientPort", clientPort);
        props.setProperty("initLimit", initLimit);
        props.setProperty("syncLimit", syncLimit);

        QuorumPeerConfig quorumConfig = new QuorumPeerConfig();
        try {
            quorumConfig.parseProperties(props);

            if (zkServer == null) {
                synchronized (StandaloneZKServerForTest.class) {
                    if (zkServer == null) {
                        zkServer = new ZooKeeperServerMain();
                        final ServerConfig config = new ServerConfig();
                        config.readFrom(quorumConfig);
                        // blocks the calling thread for the server's lifetime;
                        // note the class monitor stays held while it runs
                        zkServer.runFromConfig(config);
                    }
                }
            }

        } catch (Exception e) {
            // best-effort for tests: log and continue rather than fail setup
            logger.error("start standalone server fail!", e);
        }
    }


}
+ http://maven.apache.org + + UTF-8 + + + + junit + junit + test + + + com.baomidou + mybatis-plus + ${mybatis-plus.version} + + + com.baomidou + mybatis-plus-boot-starter + ${mybatis-plus.version} + + + + + + + + org.postgresql + postgresql + + + org.projectlombok + lombok + ${lombok.version} + + + org.springframework.boot + spring-boot-starter-test + test + + + org.ow2.asm + asm + + + org.springframework.boot + spring-boot + + + org.springframework.boot + spring-boot-autoconfigure + + + + + + mysql + mysql-connector-java + + + + com.alibaba + druid + + + + ch.qos.logback + logback-classic + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + org.apache.httpcomponents + httpclient + + + commons-httpclient + commons-httpclient + + + org.apache.commons + commons-lang3 + + + commons-lang + commons-lang + + + + com.alibaba + fastjson + compile + + + + com.cronutils + cron-utils + + + + org.quartz-scheduler + quartz + + + c3p0 + c3p0 + + + + + + org.quartz-scheduler + quartz-jobs + + + commons-configuration + commons-configuration + + + org.apache.dolphinscheduler + dolphinscheduler-common + + + protobuf-java + com.google.protobuf + + + + + org.springframework + spring-test + test + + + io.swagger + swagger-annotations + 1.5.20 + compile + + + org.yaml + snakeyaml + + + + + + + + src/main/java + + **/*.xml + + false + + + src/main/resources + + **/*.xml + **/*.yml + + false + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${java.version} + ${java.version} + ${project.build.sourceEncoding} + + + + + diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AbstractBaseDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AbstractBaseDao.java new file mode 100644 index 0000000000..0b1090c368 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AbstractBaseDao.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao;

/**
 * Base class for all DAO objects in the dao module.
 */
public abstract class AbstractBaseDao {

    /**
     * Initialize the DAO's resources (e.g. its mappers). Called once before
     * the DAO is used; subclasses managed by Spring may leave this a no-op.
     */
    protected abstract void init();


}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao;

import org.apache.dolphinscheduler.common.enums.AlertStatus;
import org.apache.dolphinscheduler.common.enums.AlertType;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.dao.entity.Alert;
import org.apache.dolphinscheduler.dao.mapper.AlertMapper;
import org.apache.dolphinscheduler.dao.mapper.UserAlertGroupMapper;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.Date;
import java.util.List;

/**
 * DAO for alert records and the users that receive them.
 */
@Component
public class AlertDao extends AbstractBaseDao {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    @Autowired
    private AlertMapper alertMapper;

    @Autowired
    private UserAlertGroupMapper userAlertGroupMapper;

    @Override
    protected void init() {
        // mappers are injected by Spring; nothing to initialize manually
    }

    /**
     * Insert an alert.
     *
     * @param alert alert to persist
     * @return number of rows inserted
     */
    public int addAlert(Alert alert) {
        return alertMapper.insert(alert);
    }

    /**
     * Update an alert's status and log, stamping the update time.
     *
     * @param alertStatus new status
     * @param log         execution log to record
     * @param id          alert id
     * @return number of rows updated
     */
    public int updateAlert(AlertStatus alertStatus, String log, int id) {
        Alert alert = alertMapper.selectById(id);
        alert.setAlertStatus(alertStatus);
        alert.setUpdateTime(new Date());
        alert.setLog(log);
        return alertMapper.updateById(alert);
    }

    /**
     * Query the users belonging to an alert group.
     *
     * @param alertGroupId alert group id
     * @return users in the group
     */
    public List<User> queryUserByAlertGroupId(int alertGroupId) {
        return userAlertGroupMapper.queryForUser(alertGroupId);
    }

    /**
     * Record an EMAIL alert that a MasterServer or WorkerServer stopped.
     * (Method name keeps the historical "Stoped" spelling for caller
     * compatibility.)
     *
     * @param alertgroupId alert group to notify
     * @param host         host of the server that went down
     * @param serverType   server type, e.g. "MASTER" or "WORKER"
     */
    public void sendServerStopedAlert(int alertgroupId, String host, String serverType) {
        // content is a JSON-ish table payload rendered by ShowType.TABLE
        String content = String.format("[{'type':'%s','host':'%s','event':'server down','warning level':'serious'}]",
                serverType, host);
        Alert alert = new Alert();
        alert.setTitle("Fault tolerance warning");
        alert.setShowType(ShowType.TABLE);
        alert.setContent(content);
        alert.setAlertType(AlertType.EMAIL);
        alert.setAlertGroupId(alertgroupId);
        alert.setCreateTime(new Date());
        alert.setUpdateTime(new Date());
        alertMapper.insert(alert);
    }

    /**
     * Record a process-instance timeout alert.
     *
     * @param processInstance   the timed-out process instance
     * @param processDefinition its definition (source of receivers/cc)
     */
    public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProcessDefinition processDefinition) {
        String content = String.format("[{'id':'%d','name':'%s','event':'timeout','warnLevel':'middle'}]",
                processInstance.getId(), processInstance.getName());
        insertTimeoutAlert("Process Timeout Warn", content,
                processInstance.getWarningGroupId(),
                processDefinition.getReceivers(), processDefinition.getReceiversCc());
    }

    /**
     * Record a task-instance timeout alert.
     *
     * @param alertgroupId alert group to notify
     * @param receivers    direct receivers, may be empty
     * @param receiversCc  cc receivers, may be empty
     * @param taskId       id of the timed-out task
     * @param taskName     name of the timed-out task
     */
    public void sendTaskTimeoutAlert(int alertgroupId, String receivers, String receiversCc, int taskId, String taskName) {
        String content = String.format("[{'id':'%d','name':'%s','event':'timeout','warnLevel':'middle'}]", taskId, taskName);
        insertTimeoutAlert("Task Timeout Warn", content, alertgroupId, receivers, receiversCc);
    }

    /**
     * Build and insert an EMAIL/TABLE timeout alert; shared by process and
     * task timeout paths.
     */
    private void insertTimeoutAlert(String title, String content, int alertgroupId, String receivers, String receiversCc) {
        Alert alert = new Alert();
        alert.setTitle(title);
        alert.setShowType(ShowType.TABLE);
        alert.setContent(content);
        alert.setAlertType(AlertType.EMAIL);
        alert.setAlertGroupId(alertgroupId);
        if (StringUtils.isNotEmpty(receivers)) {
            alert.setReceivers(receivers);
        }
        if (StringUtils.isNotEmpty(receiversCc)) {
            alert.setReceiversCc(receiversCc);
        }
        alert.setCreateTime(new Date());
        alert.setUpdateTime(new Date());
        alertMapper.insert(alert);
    }

    /**
     * List alerts waiting to be sent.
     *
     * @return alerts in WAIT_EXECUTION status
     */
    public List<Alert> listWaitExecutionAlert() {
        return alertMapper.listAlertByStatus(AlertStatus.WAIT_EXECUTION);
    }

    /**
     * List users of an alert group.
     *
     * @param alertGroupId alert group id
     * @return users in the group
     */
    public List<User> listUserByAlertgroupId(int alertGroupId) {
        return userAlertGroupMapper.listUserByAlertgroupId(alertGroupId);
    }


}
String.format("[{'id':'%d','name':'%s','event':'timeout','warnLevel':'middle'}]",taskId,taskName); + alert.setTitle("Task Timeout Warn"); + alert.setShowType(ShowType.TABLE); + alert.setContent(content); + alert.setAlertType(AlertType.EMAIL); + alert.setAlertGroupId(alertgroupId); + if (StringUtils.isNotEmpty(receivers)) { + alert.setReceivers(receivers); + } + if (StringUtils.isNotEmpty(receiversCc)) { + alert.setReceiversCc(receiversCc); + } + alert.setCreateTime(new Date()); + alert.setUpdateTime(new Date()); + alertMapper.insert(alert); + } + + /** + * list the alert information of waiting to be executed + * @return + */ + public List listWaitExecutionAlert(){ + return alertMapper.listAlertByStatus(AlertStatus.WAIT_EXECUTION); + } + + /** + * list user information by alert group id + * @param alergroupId + * @return + */ + public List listUserByAlertgroupId(int alergroupId){ + return userAlertGroupMapper.listUserByAlertgroupId(alergroupId); + } + + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/App.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/App.java new file mode 100644 index 0000000000..c39cc2439a --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/App.java @@ -0,0 +1,11 @@ +package org.apache.dolphinscheduler.dao; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +@SpringBootApplication +public class App { + public static void main(String[] args){ + SpringApplication.run(App.class); + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoFactory.java new file mode 100644 index 0000000000..36887593da --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoFactory.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Factory caching one DAO instance per class.
 */
public class DaoFactory {

    private static final Logger logger = LoggerFactory.getLogger(DaoFactory.class);

    /** Cache of singleton DAO instances, keyed by fully qualified class name. */
    private static final Map<String, AbstractBaseDao> daoMap = new ConcurrentHashMap<>();

    private DaoFactory() {
        // utility class, no instances
    }

    /**
     * Get (lazily creating) the cached DAO instance for the given class.
     *
     * @param clazz DAO class to look up; must have a no-arg constructor
     * @return the cached instance, or {@code null} if instantiation failed
     *         (the failure is logged and creation is retried on the next call)
     */
    @SuppressWarnings("unchecked")
    public static <T extends AbstractBaseDao> T getDaoInstance(Class<T> clazz) {
        // computeIfAbsent gives atomic create-once semantics without locking
        // the whole map for every lookup; returning null leaves no mapping,
        // matching the old behavior of retrying after a failed creation
        return (T) daoMap.computeIfAbsent(clazz.getName(), className -> {
            try {
                T dao = clazz.getConstructor().newInstance();
                // instance initialization (wires up mappers etc.)
                dao.init();
                return dao;
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
                return null;
            }
        });
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao;

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.MonitorRecord;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;


/**
 * Reads MySQL connection statistics ("show global variables/status") to
 * report database health for monitoring.
 */
public class MonitorDBDao {

    private static Logger logger = LoggerFactory.getLogger(MonitorDBDao.class);

    /** Column holding the variable name in "show global ..." result sets. */
    public static final String VARIABLE_NAME = "variable_name";

    /**
     * Data-source configuration, loaded once at class-load time.
     */
    private static Configuration conf;

    static {
        try {
            conf = new PropertiesConfiguration(Constants.DATA_SOURCE_PROPERTIES);
        } catch (ConfigurationException e) {
            // fail fast: monitoring is useless without data-source config
            logger.error("load configuration exception", e);
            System.exit(1);
        }
    }

    /**
     * Open a raw JDBC connection to the configured MySQL data source.
     *
     * @return an open connection, or {@code null} on failure (logged)
     */
    private static Connection getConn() {
        String url = conf.getString(Constants.SPRING_DATASOURCE_URL);
        String username = conf.getString(Constants.SPRING_DATASOURCE_USERNAME);
        String password = conf.getString(Constants.SPRING_DATASOURCE_PASSWORD);
        Connection conn = null;
        try {
            // classloader, load driver
            Class.forName(Constants.JDBC_MYSQL_CLASS_NAME);
            conn = DriverManager.getConnection(url, username, password);
        } catch (ClassNotFoundException e) {
            logger.error("ClassNotFoundException ", e);
        } catch (SQLException e) {
            logger.error("SQLException ", e);
        }
        return conn;
    }


    /**
     * Query the database's connection statistics.
     *
     * @return a single-element list with the current {@link MonitorRecord};
     *         empty if no connection could be obtained. {@code state} is 1
     *         when the queries succeeded, 0 on SQL failure.
     */
    public static List<MonitorRecord> queryDatabaseState() {
        List<MonitorRecord> list = new ArrayList<>(1);

        long maxConnections = 0;
        long maxUsedConnections = 0;
        long threadsConnections = 0;
        long threadsRunningConnections = 0;
        // mysql running state: 1 = ok, 0 = query failed
        int state = 1;

        MonitorRecord monitorRecord = new MonitorRecord();
        // try-with-resources closes ResultSet, Statement and Connection in
        // all paths (the original leaked the Statement/ResultSets)
        try (Connection conn = getConn()) {
            if (conn == null) {
                return list;
            }

            try (Statement stmt = conn.createStatement()) {
                try (ResultSet rs = stmt.executeQuery("show global variables")) {
                    while (rs.next()) {
                        if ("MAX_CONNECTIONS".equalsIgnoreCase(rs.getString(VARIABLE_NAME))) {
                            maxConnections = Long.parseLong(rs.getString("value"));
                        }
                    }
                }

                try (ResultSet rs = stmt.executeQuery("show global status")) {
                    while (rs.next()) {
                        String name = rs.getString(VARIABLE_NAME);
                        if ("MAX_USED_CONNECTIONS".equalsIgnoreCase(name)) {
                            maxUsedConnections = Long.parseLong(rs.getString("value"));
                        } else if ("THREADS_CONNECTED".equalsIgnoreCase(name)) {
                            threadsConnections = Long.parseLong(rs.getString("value"));
                        } else if ("THREADS_RUNNING".equalsIgnoreCase(name)) {
                            threadsRunningConnections = Long.parseLong(rs.getString("value"));
                        }
                    }
                }
            }
        } catch (SQLException e) {
            logger.error("SQLException ", e);
            state = 0;
        }

        monitorRecord.setDate(new Date());
        monitorRecord.setMaxConnections(maxConnections);
        monitorRecord.setMaxUsedConnections(maxUsedConnections);
        monitorRecord.setThreadsConnections(threadsConnections);
        monitorRecord.setThreadsRunningConnections(threadsRunningConnections);
        monitorRecord.setState(state);

        list.add(monitorRecord);

        return list;
    }
}
+ */ +package org.apache.dolphinscheduler.dao; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.model.DateInterval; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.queue.ITaskQueue; +import org.apache.dolphinscheduler.common.queue.TaskQueueFactory; +import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.IpUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.utils.cron.CronUtils; +import com.alibaba.fastjson.JSONObject; +import com.cronutils.model.Cron; +import org.apache.commons.lang3.ArrayUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; +import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.dao.mapper.*; +import org.quartz.CronExpression; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +import java.util.*; +import java.util.stream.Collectors; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * process relative dao that some mappers in this. 
+ */ +@Component +public class ProcessDao extends AbstractBaseDao { + + private final Logger logger = LoggerFactory.getLogger(getClass()); + + private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), + ExecutionStatus.RUNNING_EXEUTION.ordinal(), + ExecutionStatus.READY_PAUSE.ordinal(), +// ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(), + ExecutionStatus.READY_STOP.ordinal()}; + + @Autowired + private UserMapper userMapper; + + @Autowired + private ProcessDefinitionMapper processDefineMapper; + + @Autowired + private ProcessInstanceMapper processInstanceMapper; + + @Autowired + private DataSourceMapper dataSourceMapper; + + @Autowired + private ProcessInstanceMapMapper processInstanceMapMapper; + + @Autowired + private TaskInstanceMapper taskInstanceMapper; + + @Autowired + private CommandMapper commandMapper; + + @Autowired + private ScheduleMapper scheduleMapper; + + @Autowired + private UdfFuncMapper udfFuncMapper; + + @Autowired + private ResourceMapper resourceMapper; + + @Autowired + private WorkerGroupMapper workerGroupMapper; + + @Autowired + private ErrorCommandMapper errorCommandMapper; + + @Autowired + private TenantMapper tenantMapper; + + @Autowired + private ProjectMapper projectMapper; + + /** + * task queue impl + */ + protected ITaskQueue taskQueue; + + public ProcessDao(){ +// init(); + } + + /** + * initialize + */ + @Override + protected void init() { + userMapper = ConnectionFactory.getMapper(UserMapper.class); + processDefineMapper = ConnectionFactory.getMapper(ProcessDefinitionMapper.class); + processInstanceMapper = ConnectionFactory.getMapper(ProcessInstanceMapper.class); + dataSourceMapper = ConnectionFactory.getMapper(DataSourceMapper.class); + processInstanceMapMapper = ConnectionFactory.getMapper(ProcessInstanceMapMapper.class); + taskInstanceMapper = ConnectionFactory.getMapper(TaskInstanceMapper.class); + commandMapper = ConnectionFactory.getMapper(CommandMapper.class); + scheduleMapper = 
ConnectionFactory.getMapper(ScheduleMapper.class); + udfFuncMapper = ConnectionFactory.getMapper(UdfFuncMapper.class); + resourceMapper = ConnectionFactory.getMapper(ResourceMapper.class); + workerGroupMapper = ConnectionFactory.getMapper(WorkerGroupMapper.class); + taskQueue = TaskQueueFactory.getTaskQueueInstance(); + tenantMapper = ConnectionFactory.getMapper(TenantMapper.class); + } + + + /** + * find one command from command queue, construct process instance + * @param logger + * @param host + * @param validThreadNum + * @return + */ + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) + public ProcessInstance scanCommand(Logger logger, String host, int validThreadNum){ + + ProcessInstance processInstance = null; + Command command = findOneCommand(); + if (command == null) { + return null; + } + logger.info(String.format("find one command: id: %d, type: %s", command.getId(),command.getCommandType().toString())); + + try{ + processInstance = constructProcessInstance(command, host); + //cannot construct process instance, return null; + if(processInstance == null){ + logger.error("scan command, command parameter is error: %s", command.toString()); + delCommandByid(command.getId()); + saveErrorCommand(command, "process instance is null"); + return null; + }else if(!checkThreadNum(command, validThreadNum)){ + logger.info("there is not enough thread for this command: {}",command.toString() ); + return setWaitingThreadProcess(command, processInstance); + }else{ + processInstance.setCommandType(command.getCommandType()); + processInstance.addHistoryCmd(command.getCommandType()); + saveProcessInstance(processInstance); + this.setSubProcessParam(processInstance); + delCommandByid(command.getId()); + return processInstance; + } + }catch (Exception e){ + logger.error("scan command error ", e); + saveErrorCommand(command, e.toString()); + delCommandByid(command.getId()); + } + return null; + } + + private void saveErrorCommand(Command command, 
String message) { + + ErrorCommand errorCommand = new ErrorCommand(command, message); + this.errorCommandMapper.insert(errorCommand); + } + + /** + * set process waiting thread + * @param command + * @param processInstance + * @return + */ + private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) { + processInstance.setState(ExecutionStatus.WAITTING_THREAD); + if(command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD){ + processInstance.addHistoryCmd(command.getCommandType()); + } + saveProcessInstance(processInstance); + this.setSubProcessParam(processInstance); + createRecoveryWaitingThreadCommand(command, processInstance); + return null; + } + + private boolean checkThreadNum(Command command, int validThreadNum) { + int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId()); + return validThreadNum >= commandThreadCount; + } + + /** + * insert one command + */ + public int createCommand(Command command) { + int result = 0; + if (command != null){ + result = commandMapper.insert(command); + } + return result; + } + + /** + * + * find one command from queue list + * @return + */ + public Command findOneCommand(){ + return commandMapper.getOneToRun(); + } + + /** + * check the input command exists in queue list + * @param command + * @return + */ + public Boolean verifyIsNeedCreateCommand(Command command){ + Boolean isNeedCreate = true; + Map cmdTypeMap = new HashMap(); + cmdTypeMap.put(CommandType.REPEAT_RUNNING,1); + cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS,1); + cmdTypeMap.put(CommandType.START_FAILURE_TASK_PROCESS,1); + CommandType commandType = command.getCommandType(); + + if(cmdTypeMap.containsKey(commandType)){ + JSONObject cmdParamObj = (JSONObject) JSONObject.parse(command.getCommandParam()); + JSONObject tempObj; + int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING); + + List commands = commandMapper.getAll(null); + //遍历所有命令 + for 
(Command tmpCommand:commands){ + if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){ + tempObj = (JSONObject) JSONObject.parse(tmpCommand.getCommandParam()); + if(tempObj != null && processInstanceId == tempObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING)){ + isNeedCreate = false; + break; + } + } + } + } + return isNeedCreate; + } + + /** + * find process instance detail by id + * @param processId + * @return + */ + public ProcessInstance findProcessInstanceDetailById(int processId){ + return processInstanceMapper.queryDetailById(processId); + } + + /** + * find process instance by id + * @param processId + * @return + */ + public ProcessInstance findProcessInstanceById(int processId){ + + return processInstanceMapper.selectById(processId); + } + + /** + * find process define by id. + * @param processDefinitionId + * @return + */ + public ProcessDefinition findProcessDefineById(int processDefinitionId) { + return processDefineMapper.selectById(processDefinitionId); + } + + /** + * delete work process instance by id + * @param processInstanceId + * @return + */ + public int deleteWorkProcessInstanceById(int processInstanceId){ + return processInstanceMapper.deleteById(processInstanceId); + } + + /** + * + * delete all sub process by parent instance id + * @return + */ + public int deleteAllSubWorkProcessByParentId(int processInstanceId){ + + List subProcessIdList = processInstanceMapMapper.querySubIdListByParentId(processInstanceId); + + for(Integer subId : subProcessIdList ){ + deleteAllSubWorkProcessByParentId(subId); + deleteWorkProcessMapByParentId(subId); + deleteWorkProcessInstanceById(subId); + } + return 1; + } + + /** + * create process define + * @param processDefinition + * @return + */ + public int createProcessDefine(ProcessDefinition processDefinition){ + int count = 0; + if(processDefinition != null){ + count = this.processDefineMapper.insert(processDefinition); + } + return count; + } + + + /** + * calculate sub process number in the process 
define. + * @param processDefinitionId + * @return + */ + private Integer workProcessThreadNumCount(Integer processDefinitionId){ + List ids = new ArrayList<>(); + recurseFindSubProcessId(processDefinitionId, ids); + return ids.size()+1; + } + + /** + * recursive query sub process definition id by parent id. + * @param parentId + * @param ids + */ + public void recurseFindSubProcessId(int parentId, List ids){ + ProcessDefinition processDefinition = processDefineMapper.selectById(parentId); + String processDefinitionJson = processDefinition.getProcessDefinitionJson(); + + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + + List taskNodeList = processData.getTasks(); + + if (taskNodeList != null && taskNodeList.size() > 0){ + + for (TaskNode taskNode : taskNodeList){ + String parameter = taskNode.getParams(); + if (parameter.contains(CMDPARAM_SUB_PROCESS_DEFINE_ID)){ + SubProcessParameters subProcessParam = JSONObject.parseObject(parameter, SubProcessParameters.class); + ids.add(subProcessParam.getProcessDefinitionId()); + recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids); + } + } + } + } + + /** + * create recovery waiting thread command when thread pool is not enough for the process instance. + * sub work process instance need not to create recovery command. + * create recovery waiting thread command and delete origin command at the same time. 
+ * if the recovery command is exists, only update the field update_time + * @param originCommand + * @param processInstance + */ + public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) { + + // sub process doesnot need to create wait command + if(processInstance.getIsSubProcess() == Flag.YES){ + if(originCommand != null){ + commandMapper.deleteById(originCommand.getId()); + } + return; + } + Map cmdParam = new HashMap<>(); + cmdParam.put(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD, String.valueOf(processInstance.getId())); + // process instance quit by "waiting thread" state + if(originCommand == null){ + Command command = new Command( + CommandType.RECOVER_WAITTING_THREAD, + processInstance.getTaskDependType(), + processInstance.getFailureStrategy(), + processInstance.getExecutorId(), + processInstance.getProcessDefinitionId(), + JSONUtils.toJson(cmdParam), + processInstance.getWarningType(), + processInstance.getWarningGroupId(), + processInstance.getScheduleTime(), + processInstance.getProcessInstancePriority() + ); + saveCommand(command); + return ; + } + + // update the command time if current command if recover from waiting + if(originCommand.getCommandType() == CommandType.RECOVER_WAITTING_THREAD){ + originCommand.setUpdateTime(new Date()); + saveCommand(originCommand); + }else{ + // delete old command and create new waiting thread command + commandMapper.deleteById(originCommand.getId()); + originCommand.setId(0); + originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD); + originCommand.setUpdateTime(new Date()); + originCommand.setCommandParam(JSONUtils.toJson(cmdParam)); + originCommand.setProcessInstancePriority(processInstance.getProcessInstancePriority()); + saveCommand(originCommand); + } + } + + /** + * get schedule time from command + * @param command + * @param cmdParam + * @return + */ + private Date getScheduleTime(Command command, Map cmdParam){ + Date scheduleTime = 
command.getScheduleTime(); + if(scheduleTime == null){ + if(cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)){ + scheduleTime = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); + } + } + return scheduleTime; + } + + /** + * generate a new work process instance from command. + * @param processDefinition + * @param command + * @param cmdParam + * @return + */ + private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition, + Command command, + Map cmdParam){ + ProcessInstance processInstance = new ProcessInstance(processDefinition); + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + processInstance.setRecovery(Flag.NO); + processInstance.setStartTime(new Date()); + processInstance.setRunTimes(1); + processInstance.setMaxTryTimes(0); + processInstance.setProcessDefinitionId(command.getProcessDefinitionId()); + processInstance.setCommandParam(command.getCommandParam()); + processInstance.setCommandType(command.getCommandType()); + processInstance.setIsSubProcess(Flag.NO); + processInstance.setTaskDependType(command.getTaskDependType()); + processInstance.setFailureStrategy(command.getFailureStrategy()); + processInstance.setExecutorId(command.getExecutorId()); + WarningType warningType = command.getWarningType() == null ? WarningType.NONE : command.getWarningType(); + processInstance.setWarningType(warningType); + Integer warningGroupId = command.getWarningGroupId() == null ? 
0 : command.getWarningGroupId(); + processInstance.setWarningGroupId(warningGroupId); + + // schedule time + Date scheduleTime = getScheduleTime(command, cmdParam); + if(scheduleTime != null){ + processInstance.setScheduleTime(scheduleTime); + } + processInstance.setCommandStartTime(command.getStartTime()); + processInstance.setLocations(processDefinition.getLocations()); + processInstance.setConnects(processDefinition.getConnects()); + // curing global params + processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( + processDefinition.getGlobalParamMap(), + processDefinition.getGlobalParamList(), + getCommandTypeIfComplement(processInstance, command), + processInstance.getScheduleTime())); + + //copy process define json to process instance + processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson()); + // set process instance priority + processInstance.setProcessInstancePriority(command.getProcessInstancePriority()); + int workerGroupId = command.getWorkerGroupId() == 0 ? -1 : command.getWorkerGroupId(); + processInstance.setWorkerGroupId(workerGroupId); + processInstance.setTimeout(processDefinition.getTimeout()); + processInstance.setTenantId(processDefinition.getTenantId()); + return processInstance; + } + + /** + * get process tenant + * there is tenant id in definition, use the tenant of the definition. + * if there is not tenant id in the definiton or the tenant not exist + * use definition creator's tenant. 
+ * @param tenantId + * @param userId + * @return + */ + public Tenant getTenantForProcess(int tenantId, int userId){ + Tenant tenant = null; + if(tenantId >= 0){ + tenant = tenantMapper.queryById(tenantId); + } + if(tenant == null){ + User user = userMapper.selectById(userId); + tenant = tenantMapper.queryById(user.getTenantId()); + } + return tenant; + } + + /** + * check command parameters is valid + * @param command + * @param cmdParam + * @return + */ + private Boolean checkCmdParam(Command command, Map cmdParam){ + if(command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType()== TaskDependType.TASK_PRE){ + if(cmdParam == null + || !cmdParam.containsKey(Constants.CMDPARAM_START_NODE_NAMES) + || cmdParam.get(Constants.CMDPARAM_START_NODE_NAMES).isEmpty()){ + logger.error(String.format("command node depend type is %s, but start nodes is null ", command.getTaskDependType().toString())); + return false; + } + } + return true; + } + + /** + * construct process instance according to one command. + * @param command + * @param host + * @return + */ + private ProcessInstance constructProcessInstance(Command command, String host){ + + ProcessInstance processInstance = null; + CommandType commandType = command.getCommandType(); + Map cmdParam = JSONUtils.toMap(command.getCommandParam()); + + ProcessDefinition processDefinition = null; + if(command.getProcessDefinitionId() != 0){ + processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId()); + if(processDefinition == null){ + logger.error(String.format("cannot find the work process define! 
define id : %d", command.getProcessDefinitionId())); + return null; + } + } + + if(cmdParam != null ){ + Integer processInstanceId = 0; + // recover from failure or pause tasks + if(cmdParam.containsKey(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING)) { + String processId = cmdParam.get(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING); + processInstanceId = Integer.parseInt(processId); + if (processInstanceId == 0) { + logger.error("command parameter is error, [ ProcessInstanceId ] is 0"); + return null; + } + }else if(cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)){ + // sub process map + String pId = cmdParam.get(Constants.CMDPARAM_SUB_PROCESS); + processInstanceId = Integer.parseInt(pId); + }else if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD)){ + // waiting thread command + String pId = cmdParam.get(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD); + processInstanceId = Integer.parseInt(pId); + } + if(processInstanceId ==0){ + processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); + }else{ + processInstance = this.findProcessInstanceDetailById(processInstanceId); + } + processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId()); + processInstance.setProcessDefinition(processDefinition); + + //reset command parameter + if(processInstance.getCommandParam() != null){ + Map processCmdParam = JSONUtils.toMap(processInstance.getCommandParam()); + for(String key : processCmdParam.keySet()){ + if(!cmdParam.containsKey(key)){ + cmdParam.put(key,processCmdParam.get(key)); + } + } + } + // reset command parameter if sub process + if(cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)){ + processInstance.setCommandParam(command.getCommandParam()); + } + }else{ + // generate one new process instance + processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); + } + if(!checkCmdParam(command, cmdParam)){ + logger.error("command parameter check failed!"); + return null; + 
} + + if(command.getScheduleTime() != null){ + processInstance.setScheduleTime(command.getScheduleTime()); + } + processInstance.setHost(host); + + ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXEUTION; + int runTime = processInstance.getRunTimes(); + switch (commandType){ + case START_PROCESS: + break; + case START_FAILURE_TASK_PROCESS: + // find failed tasks and init these tasks + List failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE); + List toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE); + List killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL); + cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); + + failedList.addAll(killedList); + failedList.addAll(toleranceList); + for(Integer taskId : failedList){ + initTaskInstance(this.findTaskInstanceById(taskId)); + } + cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING, + String.join(Constants.COMMA, convertIntListToString(failedList))); + processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); + processInstance.setRunTimes(runTime +1 ); + break; + case START_CURRENT_TASK_PROCESS: + break; + case RECOVER_WAITTING_THREAD: + break; + case RECOVER_SUSPENDED_PROCESS: + // find pause tasks and init task's state + cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); + List suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE); + List stopNodeList = findTaskIdByInstanceState(processInstance.getId(), + ExecutionStatus.KILL); + suspendedNodeList.addAll(stopNodeList); + for(Integer taskId : suspendedNodeList){ + // 把暂停状态初始化 + initTaskInstance(this.findTaskInstanceById(taskId)); + } + cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList))); + processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); + 
processInstance.setRunTimes(runTime +1); + break; + case RECOVER_TOLERANCE_FAULT_PROCESS: + // recover tolerance fault process + processInstance.setRecovery(Flag.YES); + runStatus = processInstance.getState(); + break; + case COMPLEMENT_DATA: + // delete all the valid tasks when complement data + List taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId()); + for(TaskInstance taskInstance : taskInstanceList){ + taskInstance.setFlag(Flag.NO); + this.updateTaskInstance(taskInstance); + } + break; + case REPEAT_RUNNING: + // delete the recover task names from command parameter + if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)){ + cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); + processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); + } + // delete all the valid tasks when repeat running + List validTaskList = findValidTaskListByProcessId(processInstance.getId()); + for(TaskInstance taskInstance : validTaskList){ + taskInstance.setFlag(Flag.NO); + updateTaskInstance(taskInstance); + } + processInstance.setStartTime(new Date()); + processInstance.setEndTime(null); + processInstance.setRunTimes(runTime +1); + initComplementDataParam(processDefinition, processInstance, cmdParam); + break; + case SCHEDULER: + break; + default: + break; + } + processInstance.setState(runStatus); + return processInstance; + } + + /** + * return complement data if the process start with complement data + */ + private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command){ + if(CommandType.COMPLEMENT_DATA == processInstance.getCmdTypeIfComplement()){ + return CommandType.COMPLEMENT_DATA; + }else{ + return command.getCommandType(); + } + } + + /** + * initialize complement data parameters + * @param processDefinition + * @param processInstance + * @param cmdParam + */ + private void initComplementDataParam(ProcessDefinition processDefinition, ProcessInstance processInstance, Map cmdParam) { + 
if(!processInstance.isComplementData()){ + return; + } + + Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE), + YYYY_MM_DD_HH_MM_SS); + processInstance.setScheduleTime(startComplementTime); + processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( + processDefinition.getGlobalParamMap(), + processDefinition.getGlobalParamList(), + CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); + + } + + /** + * set sub work process parameters. + * handle sub work process instance, update relation table and command parameters + * set sub work process flag, extends parent work process command parameters. + */ + public ProcessInstance setSubProcessParam(ProcessInstance subProcessInstance){ + String cmdParam = subProcessInstance.getCommandParam(); + if(StringUtils.isEmpty(cmdParam)){ + return subProcessInstance; + } + Map paramMap = JSONUtils.toMap(cmdParam); + // write sub process id into cmd param. + if(paramMap.containsKey(CMDPARAM_SUB_PROCESS) + && CMDPARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMDPARAM_SUB_PROCESS))){ + paramMap.remove(CMDPARAM_SUB_PROCESS); + paramMap.put(CMDPARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId())); + subProcessInstance.setCommandParam(JSONUtils.toJson(paramMap)); + subProcessInstance.setIsSubProcess(Flag.YES); + this.saveProcessInstance(subProcessInstance); + } + // copy parent instance user def params to sub process.. 
+ String parentInstanceId = paramMap.get(CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID); + if(StringUtils.isNotEmpty(parentInstanceId)){ + ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId)); + if(parentInstance != null){ + subProcessInstance.setGlobalParams( + joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams())); + this.saveProcessInstance(subProcessInstance); + }else{ + logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam); + } + } + ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class); + if(processInstanceMap == null || processInstanceMap.getParentProcessInstanceId() == 0){ + return subProcessInstance; + } + // update sub process id to process map table + processInstanceMap.setProcessInstanceId(subProcessInstance.getId()); + + this.updateWorkProcessInstanceMap(processInstanceMap); + return subProcessInstance; + } + + /** + * join parent global params into sub process. + * only the keys doesn't in sub process global would be joined. 
+ * @param parentGlobalParams + * @param subGlobalParams + * @return + */ + private String joinGlobalParams(String parentGlobalParams, String subGlobalParams){ + List parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class); + List subPropertyList = JSONUtils.toList(subGlobalParams, Property.class); + Map subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); + + for(Property parent : parentPropertyList){ + if(!subMap.containsKey(parent.getProp())){ + subPropertyList.add(parent); + } + } + return JSONUtils.toJson(subPropertyList); + } + + /** + * initialize task instance + * @param taskInstance + */ + private void initTaskInstance(TaskInstance taskInstance){ + + if(!taskInstance.isSubProcess()){ + if(taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure()){ + taskInstance.setFlag(Flag.NO); + updateTaskInstance(taskInstance); + return; + } + } + taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); + updateTaskInstance(taskInstance); + } + + /** + * submit task to mysql and task queue + * submit sub process to command + * @param taskInstance + * @return + */ + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) + public TaskInstance submitTask(TaskInstance taskInstance, ProcessInstance processInstance){ + logger.info("start submit task : {}, instance id:{}, state: {}, ", + taskInstance.getName(), processInstance.getId(), processInstance.getState() ); + processInstance = this.findProcessInstanceDetailById(processInstance.getId()); + //submit to mysql + TaskInstance task= submitTaskInstanceToMysql(taskInstance, processInstance); + if(task.isSubProcess() && !task.getState().typeIsFinished()){ + ProcessInstanceMap processInstanceMap = setProcessInstanceMap(processInstance, task); + + TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class); + Map subProcessParam = JSONUtils.toMap(taskNode.getParams()); + Integer defineId = 
Integer.parseInt(subProcessParam.get(Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID)); + createSubWorkProcessCommand(processInstance, processInstanceMap, defineId, task); + }else if(!task.getState().typeIsFinished()){ + //submit to task queue + task.setProcessInstancePriority(processInstance.getProcessInstancePriority()); + submitTaskToQueue(task); + } + logger.info("submit task :{} state:{} complete, instance id:{} state: {} ", + taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState()); + return task; + } + + /** + * set work process instance map + * @param parentInstance + * @param parentTask + * @return + */ + private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask){ + ProcessInstanceMap processMap = findWorkProcessMapByParent(parentInstance.getId(), parentTask.getId()); + if(processMap != null){ + return processMap; + }else if(parentInstance.getCommandType() == CommandType.REPEAT_RUNNING + || parentInstance.isComplementData()){ + // update current task id to map + // repeat running does not generate new sub process instance + processMap = findPreviousTaskProcessMap(parentInstance, parentTask); + if(processMap!= null){ + processMap.setParentTaskInstanceId(parentTask.getId()); + updateWorkProcessInstanceMap(processMap); + return processMap; + } + } + // new task + processMap = new ProcessInstanceMap(); + processMap.setParentProcessInstanceId(parentInstance.getId()); + processMap.setParentTaskInstanceId(parentTask.getId()); + createWorkProcessInstanceMap(processMap); + return processMap; + } + + /** + * find previous task work process map. 
+ * @param parentProcessInstance + * @param parentTask + * @return + */ + private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance, + TaskInstance parentTask) { + + Integer preTaskId = 0; + List preTaskList = this.findPreviousTaskListByWorkProcessId(parentProcessInstance.getId()); + for(TaskInstance task : preTaskList){ + if(task.getName().equals(parentTask.getName())){ + preTaskId = task.getId(); + ProcessInstanceMap map = findWorkProcessMapByParent(parentProcessInstance.getId(), preTaskId); + if(map!=null){ + return map; + } + } + } + logger.info("sub process instance is not found,parent task:{},parent instance:{}", + parentTask.getId(), parentProcessInstance.getId()); + return null; + } + + /** + * create sub work process command + * @param parentProcessInstance + * @param instanceMap + * @param childDefineId + * @param task + */ + private void createSubWorkProcessCommand(ProcessInstance parentProcessInstance, + ProcessInstanceMap instanceMap, + Integer childDefineId, TaskInstance task){ + ProcessInstance childInstance = findSubProcessInstance(parentProcessInstance.getId(), task.getId()); + + CommandType fatherType = parentProcessInstance.getCommandType(); + CommandType commandType = fatherType; + if(childInstance == null || commandType == CommandType.REPEAT_RUNNING){ + String fatherHistoryCommand = parentProcessInstance.getHistoryCmd(); + // sub process must begin with schedule/complement data + // if father begin with scheduler/complement data + if(fatherHistoryCommand.startsWith(CommandType.SCHEDULER.toString()) || + fatherHistoryCommand.startsWith(CommandType.COMPLEMENT_DATA.toString())){ + commandType = CommandType.valueOf(fatherHistoryCommand.split(Constants.COMMA)[0]); + } + } + + if(childInstance != null){ + childInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); + updateProcessInstance(childInstance); + } + // set sub work process command + String processMapStr = JSONUtils.toJson(instanceMap); + Map cmdParam = 
JSONUtils.toMap(processMapStr); + + if(commandType == CommandType.COMPLEMENT_DATA || + (childInstance != null && childInstance.isComplementData())){ + Map parentParam = JSONUtils.toMap(parentProcessInstance.getCommandParam()); + String endTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE); + String startTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE); + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, endTime); + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, startTime); + processMapStr = JSONUtils.toJson(cmdParam); + } + + updateSubProcessDefinitionByParent(parentProcessInstance, childDefineId); + + Command command = new Command(); + command.setWarningType(parentProcessInstance.getWarningType()); + command.setWarningGroupId(parentProcessInstance.getWarningGroupId()); + command.setFailureStrategy(parentProcessInstance.getFailureStrategy()); + command.setProcessDefinitionId(childDefineId); + command.setScheduleTime(parentProcessInstance.getScheduleTime()); + command.setExecutorId(parentProcessInstance.getExecutorId()); + command.setCommandParam(processMapStr); + command.setCommandType(commandType); + command.setProcessInstancePriority(parentProcessInstance.getProcessInstancePriority()); + createCommand(command); + logger.info("sub process command created: {} ", command.toString()); + } + + private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) { + ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId()); + ProcessDefinition childDefinition = this.findProcessDefineById(childDefinitionId); + if(childDefinition != null && fatherDefinition != null){ + childDefinition.setReceivers(fatherDefinition.getReceivers()); + childDefinition.setReceiversCc(fatherDefinition.getReceiversCc()); + processDefineMapper.updateById(childDefinition); + } + } + + /** + * submit task to mysql + * @param taskInstance + * @return + */ + public TaskInstance 
submitTaskInstanceToMysql(TaskInstance taskInstance, ProcessInstance processInstance){ + ExecutionStatus processInstanceState = processInstance.getState(); + + if(taskInstance.getState().typeIsFailure()){ + if(taskInstance.isSubProcess()){ + taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 ); + }else { + + if( processInstanceState != ExecutionStatus.READY_STOP + && processInstanceState != ExecutionStatus.READY_PAUSE){ + // failure task set invalid + taskInstance.setFlag(Flag.NO); + updateTaskInstance(taskInstance); + // crate new task instance + if(taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE){ + taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 ); + } + taskInstance.setEndTime(null); + taskInstance.setStartTime(new Date()); + taskInstance.setFlag(Flag.YES); + taskInstance.setHost(null); + taskInstance.setId(0); + } + } + } + taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority()); + taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState)); + taskInstance.setSubmitTime(new Date()); + saveTaskInstance(taskInstance); + return taskInstance; + } + + /** + * submit task to queue + * @param task + */ + public Boolean submitTaskToQueue(TaskInstance task) { + + try{ + // task cannot submit when running + if(task.getState() == ExecutionStatus.RUNNING_EXEUTION){ + logger.info(String.format("submit to task queue, but task [%s] state already be running. 
", task.getName())); + return true; + } + if(checkTaskExistsInTaskQueue(task)){ + logger.info(String.format("submit to task queue, but task [%s] already exists in the queue.", task.getName())); + return true; + } + logger.info("task ready to queue: {}" , task); + taskQueue.add(SCHEDULER_TASKS_QUEUE, taskZkInfo(task)); + logger.info(String.format("master insert into queue success, task : %s", task.getName()) ); + return true; + }catch (Exception e){ + logger.error("submit task to queue Exception: ", e); + logger.error("task queue error : %s", JSONUtils.toJson(task)); + return false; + + } + } + + /** + * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_${task executed by ip1},${ip2}... + * + * The tasks with the highest priority are selected by comparing the priorities of the above four levels from high to low. + * + * 流程实例优先级_流程实例id_任务优先级_任务id_任务执行机器ip1,ip2... high <- low + * + * @param taskInstance + * @return + */ + private String taskZkInfo(TaskInstance taskInstance) { + + int taskWorkerGroupId = getTaskWorkerGroupId(taskInstance); + + StringBuilder sb = new StringBuilder(100); + + sb.append(taskInstance.getProcessInstancePriority().ordinal()).append(Constants.UNDERLINE) + .append(taskInstance.getProcessInstanceId()).append(Constants.UNDERLINE) + .append(taskInstance.getTaskInstancePriority().ordinal()).append(Constants.UNDERLINE) + .append(taskInstance.getId()).append(Constants.UNDERLINE); + + if(taskWorkerGroupId > 0){ + //not to find data from db + WorkerGroup workerGroup = queryWorkerGroupById(taskWorkerGroupId); + if(workerGroup == null ){ + logger.info("task {} cannot find the worker group, use all worker instead.", taskInstance.getId()); + + sb.append(Constants.DEFAULT_WORKER_ID); + return sb.toString(); + } + + String ips = workerGroup.getIpList(); + + if(StringUtils.isBlank(ips)){ + logger.error("task:{} worker group:{} parameters(ip_list) is null, this task would be running on all workers", + taskInstance.getId(), 
workerGroup.getId()); + sb.append(Constants.DEFAULT_WORKER_ID); + return sb.toString(); + } + + StringBuilder ipSb = new StringBuilder(100); + String[] ipArray = ips.split(COMMA); + + for (String ip : ipArray) { + long ipLong = IpUtils.ipToLong(ip); + ipSb.append(ipLong).append(COMMA); + } + + if(ipSb.length() > 0) { + ipSb.deleteCharAt(ipSb.length() - 1); + } + + sb.append(ipSb); + }else{ + sb.append(Constants.DEFAULT_WORKER_ID); + } + + + return sb.toString(); + } + + /** + * get submit task instance state by the work process state + * cannot modify the task state when running/kill/submit success, or this + * task instance is already exists in task queue . + * return pause if work process state is ready pause + * return stop if work process state is ready stop + * if all of above are not satisfied, return submit success + * + * @param taskInstance + * @param processInstanceState + * @return + */ + public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState){ + ExecutionStatus state = taskInstance.getState(); + if( + // running or killed + // the task already exists in task queue + // return state + state == ExecutionStatus.RUNNING_EXEUTION + || state == ExecutionStatus.KILL + || checkTaskExistsInTaskQueue(taskInstance) + ){ + return state; + } + //return pasue /stop if process instance state is ready pause / stop + // or return submit success + if( processInstanceState == ExecutionStatus.READY_PAUSE){ + state = ExecutionStatus.PAUSE; + }else if(processInstanceState == ExecutionStatus.READY_STOP) { + state = ExecutionStatus.KILL; + }else{ + state = ExecutionStatus.SUBMITTED_SUCCESS; + } + return state; + } + + /** + * check the task instance existing in queue + * @return + */ + public boolean checkTaskExistsInTaskQueue(TaskInstance task){ + if(task.isSubProcess()){ + return false; + } + + String taskZkInfo = taskZkInfo(task); + + return taskQueue.checkTaskExists(SCHEDULER_TASKS_QUEUE, taskZkInfo); + } + + /** + * 
create a new process instance + * @param processInstance + */ + public void createProcessInstance(ProcessInstance processInstance){ + + if (processInstance != null){ + processInstanceMapper.insert(processInstance); + } + } + + /** + * insert or update work process instance to data base + * @param workProcessInstance + */ + public void saveProcessInstance(ProcessInstance workProcessInstance){ + + if (workProcessInstance == null){ + logger.error("save error, process instance is null!"); + return ; + } + //创建流程实例 + if(workProcessInstance.getId() != 0){ + processInstanceMapper.updateById(workProcessInstance); + }else{ + createProcessInstance(workProcessInstance); + } + } + + /** + * insert or update command + * @param command + * @return + */ + public int saveCommand(Command command){ + if(command.getId() != 0){ + return commandMapper.updateById(command); + }else{ + return commandMapper.insert(command); + } + } + + /** + * insert or update task instance + * @param taskInstance + * @return + */ + public boolean saveTaskInstance(TaskInstance taskInstance){ + if(taskInstance.getId() != 0){ + return updateTaskInstance(taskInstance); + }else{ + return createTaskInstance(taskInstance); + } + } + + /** + * insert task instance + * @param taskInstance + * @return + */ + public boolean createTaskInstance(TaskInstance taskInstance) { + int count = taskInstanceMapper.insert(taskInstance); + return count > 0; + } + + /** + * update task instance + * @param taskInstance + * @return + */ + public boolean updateTaskInstance(TaskInstance taskInstance){ + int count = taskInstanceMapper.updateById(taskInstance); + return count > 0; + } + /** + * delete a command by id + * @param id + */ + public void delCommandByid(int id) { + commandMapper.deleteById(id); + } + + public TaskInstance findTaskInstanceById(Integer taskId){ + return taskInstanceMapper.queryById(taskId); + } + + + /** + * package task instance,associate processInstance and processDefine + * @param taskInstId + * @return + 
*/ + public TaskInstance getTaskInstanceRelationByTaskId(int taskInstId){ + // get task instance + TaskInstance taskInstance = findTaskInstanceById(taskInstId); + // get process instance + ProcessInstance processInstance = findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); + // get process define + ProcessDefinition processDefine = findProcessDefineById(taskInstance.getProcessDefinitionId()); + + taskInstance.setProcessInstance(processInstance); + taskInstance.setProcessDefine(processDefine); + return taskInstance; + } + + + /** + * get id list by task state + * @param instanceId + * @param state + * @return + */ + public List findTaskIdByInstanceState(int instanceId, ExecutionStatus state){ + return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal()); + } + + /** + * + * find valid task list by process definition id + * @param processInstanceId + * @return + */ + public List findValidTaskListByProcessId(Integer processInstanceId){ + return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES); + } + + /** + * find previous task list by work process id + * @param workProcessInstanceId + * @return + */ + public List findPreviousTaskListByWorkProcessId(Integer workProcessInstanceId){ + return taskInstanceMapper.findValidTaskListByProcessId(workProcessInstanceId, Flag.NO); + } + + /** + * update work process instance map + * @param processInstanceMap + * @return + */ + public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){ + return processInstanceMapMapper.updateById(processInstanceMap); + } + + + /** + * create work process instance map + * @param processInstanceMap + * @return + */ + public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){ + Integer count = 0; + if(processInstanceMap !=null){ + return processInstanceMapMapper.insert(processInstanceMap); + } + return count; + } + + /** + * find work process map by parent process id and parent task id. 
+ * @param parentWorkProcessId + * @param parentTaskId + * @return + */ + public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId){ + return processInstanceMapMapper.queryByParentId(parentWorkProcessId, parentTaskId); + } + + /** + * delete work process map by parent process id + * @param parentWorkProcessId + * @return + */ + public int deleteWorkProcessMapByParentId(int parentWorkProcessId){ + return processInstanceMapMapper.deleteByParentProcessId(parentWorkProcessId); + + } + + public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId){ + ProcessInstance processInstance = null; + ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryByParentId(parentProcessId, parentTaskId); + if(processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0){ + return processInstance; + } + processInstance = findProcessInstanceById(processInstanceMap.getProcessInstanceId()); + return processInstance; + } + public ProcessInstance findParentProcessInstance(Integer subProcessId) { + ProcessInstance processInstance = null; + ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(subProcessId); + if(processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0){ + return processInstance; + } + processInstance = findProcessInstanceById(processInstanceMap.getParentProcessInstanceId()); + return processInstance; + } + + + + /** + * change task state + * @param state + * @param startTime + * @param host + * @param executePath + */ + public void changeTaskState(ExecutionStatus state, Date startTime, String host, + String executePath, + String logPath, + int taskInstId) { + TaskInstance taskInstance = taskInstanceMapper.queryById(taskInstId); + taskInstance.setState(state); + taskInstance.setStartTime(startTime); + taskInstance.setHost(host); + taskInstance.setExecutePath(executePath); + taskInstance.setLogPath(logPath); + 
saveTaskInstance(taskInstance); + } + + /** + * update process instance + * @param instance + * @return + */ + public int updateProcessInstance(ProcessInstance instance){ + + return processInstanceMapper.updateById(instance); + } + + /** + * update the process instance + * @param processInstanceId + * @param processJson + * @param globalParams + * @param scheduleTime + * @param flag + * @param locations + * @param connects + * @return + */ + public int updateProcessInstance(Integer processInstanceId, String processJson, + String globalParams, Date scheduleTime, Flag flag, + String locations, String connects){ + ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); + if(processInstance!= null){ + processInstance.setProcessInstanceJson(processJson); + processInstance.setGlobalParams(globalParams); + processInstance.setScheduleTime(scheduleTime); + processInstance.setLocations(locations); + processInstance.setConnects(connects); + return processInstanceMapper.updateById(processInstance); + } + return 0; + } + + /** + * change task state + * @param state + * @param endTime + */ + public void changeTaskState(ExecutionStatus state, + Date endTime, + int taskInstId) { + TaskInstance taskInstance = taskInstanceMapper.queryById(taskInstId); + taskInstance.setState(state); + taskInstance.setEndTime(endTime); + saveTaskInstance(taskInstance); + } + + /** + * convert integer list to string list + * @param intList + * @return + */ + public List convertIntListToString(List intList){ + if(intList == null){ + return new ArrayList<>(); + } + List result = new ArrayList(intList.size()); + for(Integer intVar : intList){ + result.add(String.valueOf(intVar)); + } + return result; + } + + /** + * set task + * 根据任务实例id设置pid + * @param taskInstId + * @param pid + */ + public void updatePidByTaskInstId(int taskInstId, int pid) { + TaskInstance taskInstance = taskInstanceMapper.queryById(taskInstId); + taskInstance.setPid(pid); + 
taskInstance.setAppLink(""); + saveTaskInstance(taskInstance); + } + + /** + * update pid and app links field by task instance id + * @param taskInstId + * @param pid + */ + public void updatePidByTaskInstId(int taskInstId, int pid,String appLinks) { + + TaskInstance taskInstance = taskInstanceMapper.queryById(taskInstId); + taskInstance.setPid(pid); + taskInstance.setAppLink(appLinks); + saveTaskInstance(taskInstance); + } + + /** + * query ProcessDefinition by name + * + * @see ProcessDefinition + */ + public ProcessDefinition findProcessDefineByName(int projectId, String name) { + ProcessDefinition projectFlow = processDefineMapper.queryByDefineName(projectId, name); + return projectFlow; + } + + /** + * query Schedule

+ * + * @see Schedule + */ + public Schedule querySchedule(int id) { + return scheduleMapper.selectById(id); + } + + public List queryNeedFailoverProcessInstances(String host){ + + return processInstanceMapper.queryByHostAndStatus(host, stateArray); + } + + + + + /** + * process need failover process instance + * @param processInstance + */ + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) + public void processNeedFailoverProcessInstances(ProcessInstance processInstance){ + + + //1 update processInstance host is null + processInstance.setHost("null"); + processInstanceMapper.updateById(processInstance); + + //2 insert into recover command + Command cmd = new Command(); + cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId()); + cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId())); + cmd.setExecutorId(processInstance.getExecutorId()); + cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS); + createCommand(cmd); + + } + + /** + * query all need failover task instances by host + * @param host + * @return + */ + public List queryNeedFailoverTaskInstances(String host){ + return taskInstanceMapper.queryByHostAndStatus(host, + StringUtils.join(stateArray, ",")); + } + + /** + * find data source by id + * @param id + * @return + */ + public DataSource findDataSourceById(int id){ + return dataSourceMapper.selectById(id); + } + + + /** + * update process instance state by id + * @param processInstanceId + * @param executionStatus + * @return + */ + public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { + ProcessInstance instance = processInstanceMapper.selectById(processInstanceId); + instance.setState(executionStatus); + return processInstanceMapper.updateById(instance); + + } + + /** + * find process instance by the task id + * @param taskId + * @return + */ + public ProcessInstance 
findProcessInstanceByTaskId(int taskId){ + TaskInstance taskInstance = taskInstanceMapper.selectById(taskId); + if(taskInstance!= null){ + return processInstanceMapper.selectById(taskInstance.getProcessInstanceId()); + } + return null; + } + + /** + * find udf function list by id list string + * @param ids + * @return + */ + public List queryUdfFunListByids(String ids){ + + return udfFuncMapper.queryUdfByIdStr(ids, null); + } + + /** + * find tenant code by resource name + * @param resName + * @return + */ + public String queryTenantCodeByResName(String resName){ + return resourceMapper.queryTenantCodeByResourceName(resName); + } + + /** + * find schedule list by process define id. + * @param ids + * @return + */ + public List selectAllByProcessDefineId(int[] ids){ + return scheduleMapper.selectAllByProcessDefineArray( + ids); + } + + /** + * get dependency cycle by work process define id and scheduler fire time + * + * @param masterId + * @param processDefinitionId + * @param scheduledFireTime 任务调度预计触发的时间 + * @return + * @throws Exception + */ + public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception { + List list = getCycleDependencies(masterId,new int[]{processDefinitionId},scheduledFireTime); + return list.size()>0 ? 
list.get(0) : null; + + } + + /** + * + * get dependency cycle list by work process define id list and scheduler fire time + * @param masterId + * @param ids + * @param scheduledFireTime 任务调度预计触发的时间 + * @return + * @throws Exception + */ + public List getCycleDependencies(int masterId,int[] ids,Date scheduledFireTime) throws Exception { + List cycleDependencyList = new ArrayList(); + if(ArrayUtils.isEmpty(ids)){ + logger.warn("ids[] is empty!is invalid!"); + return cycleDependencyList; + } + if(scheduledFireTime == null){ + logger.warn("scheduledFireTime is null!is invalid!"); + return cycleDependencyList; + } + + + String strCrontab = ""; + CronExpression depCronExpression; + Cron depCron; + List list; + List schedules = this.selectAllByProcessDefineId(ids); + // 遍历所有的调度信息 + for(Schedule depSchedule:schedules){ + strCrontab = depSchedule.getCrontab(); + depCronExpression = CronUtils.parse2CronExpression(strCrontab); + depCron = CronUtils.parse2Cron(strCrontab); + CycleEnum cycleEnum = CronUtils.getMiniCycle(depCron); + if(cycleEnum == null){ + logger.error("{} is not valid",strCrontab); + continue; + } + Calendar calendar = Calendar.getInstance(); + switch (cycleEnum){ + /*case MINUTE: + calendar.add(Calendar.MINUTE,-61);*/ + case HOUR: + calendar.add(Calendar.HOUR,-25); + break; + case DAY: + calendar.add(Calendar.DATE,-32); + break; + case WEEK: + calendar.add(Calendar.DATE,-32); + break; + case MONTH: + calendar.add(Calendar.MONTH,-13); + break; + default: + logger.warn("Dependent process definition's cycleEnum is {},not support!!", cycleEnum.name()); + continue; + } + Date start = calendar.getTime(); + + if(depSchedule.getProcessDefinitionId() == masterId){ + list = CronUtils.getSelfFireDateList(start, scheduledFireTime, depCronExpression); + }else { + list = CronUtils.getFireDateList(start, scheduledFireTime, depCronExpression); + } + if(list.size()>=1){ + start = list.get(list.size()-1); + CycleDependency dependency = new 
CycleDependency(depSchedule.getProcessDefinitionId(),start, CronUtils.getExpirationTime(start, cycleEnum), cycleEnum); + cycleDependencyList.add(dependency); + } + + } + return cycleDependencyList; + } + + /** + * find last scheduler process instance in the date interval + * @param definitionId + * @param dateInterval + * @return + */ + public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) { + return processInstanceMapper.queryLastSchedulerProcess(definitionId, + DateUtils.dateToString(dateInterval.getStartTime()), + DateUtils.dateToString(dateInterval.getEndTime())); + } + + public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) { + return processInstanceMapper.queryLastManualProcess(definitionId, + dateInterval.getStartTime(), + dateInterval.getEndTime()); + } + + public ProcessInstance findLastRunningProcess(int definitionId, DateInterval dateInterval) { + return processInstanceMapper.queryLastRunningProcess(definitionId, + DateUtils.dateToString(dateInterval.getStartTime()), + DateUtils.dateToString(dateInterval.getEndTime()), + stateArray); + } + + /** + * query user queue by process instance id + * @param processInstanceId + * @return + */ + public String queryUserQueueByProcessInstanceId(int processInstanceId){ + + String queue = ""; + ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId); + if(processInstance == null){ + return queue; + } + User executor = userMapper.selectById(processInstance.getExecutorId()); + if(executor != null){ + queue = executor.getQueue(); + } + return queue; + } + + /** + * query worker group by id + * @param workerGroupId + * @return + */ + public WorkerGroup queryWorkerGroupById(int workerGroupId){ + + return workerGroupMapper.selectById(workerGroupId); + } + + /** + * get task worker group id + * + * @param taskInstance + * @return + */ + public int getTaskWorkerGroupId(TaskInstance taskInstance) { + int 
taskWorkerGroupId = taskInstance.getWorkerGroupId(); + int processInstanceId = taskInstance.getProcessInstanceId(); + + ProcessInstance processInstance = findProcessInstanceById(processInstanceId); + + if(processInstance == null){ + logger.error("cannot find the task:{} process instance", taskInstance.getId()); + return Constants.DEFAULT_WORKER_ID; + } + int processWorkerGroupId = processInstance.getWorkerGroupId(); + + taskWorkerGroupId = (taskWorkerGroupId <= 0 ? processWorkerGroupId : taskWorkerGroupId); + return taskWorkerGroupId; + } + + public List getProjectListHavePerm(int userId){ + List createProjects = projectMapper.queryProjectCreatedByUser(userId); + List authedProjects = projectMapper.queryAuthedProjectListByUserId(userId); + + if(createProjects == null){ + createProjects = new ArrayList<>(); + } + + if(authedProjects != null){ + createProjects.addAll(authedProjects); + } + return createProjects; + } + + public List getProjectIdListHavePerm(int userId){ + + List projectIdList = new ArrayList<>(); + for(Project project : getProjectListHavePerm(userId)){ + projectIdList.add(project.getId()); + } + return projectIdList; + } + + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java new file mode 100644 index 0000000000..2c522a2183 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java @@ -0,0 +1,299 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.TaskRecordStatus; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.entity.TaskRecord; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + + +/** + * task record dao + */ +public class TaskRecordDao { + + + private static Logger logger = LoggerFactory.getLogger(TaskRecordDao.class.getName()); + + + + /** + * load conf file + */ + private static Configuration conf; + + static { + try { + conf = new PropertiesConfiguration(Constants.DATA_SOURCE_PROPERTIES); + }catch (ConfigurationException e){ + logger.error("load configuration excetpion",e); + System.exit(1); + } + } + + + /** + * get task record flag + * @return + */ + public static boolean getTaskRecordFlag(){ + return conf.getBoolean(Constants.TASK_RECORD_FLAG); + } + /** + * create connection + * @return + */ + private static Connection getConn() { + if(!conf.getBoolean(Constants.TASK_RECORD_FLAG)){ + return null; + } + String driver = "com.mysql.jdbc.Driver"; + String url = 
conf.getString(Constants.TASK_RECORD_URL); + String username = conf.getString(Constants.TASK_RECORD_USER); + String password = conf.getString(Constants.TASK_RECORD_PWD); + Connection conn = null; + try { + //classLoader,加载对应驱动 + Class.forName(driver); + conn = DriverManager.getConnection(url, username, password); + } catch (ClassNotFoundException e) { + logger.error("Exception ", e); + } catch (SQLException e) { + logger.error("Exception ", e); + } + return conn; + } + + /** + * generate where sql string + * @param filterMap + * @return + */ + private static String getWhereString(Map filterMap) + { + if(filterMap.size() ==0){ + return ""; + } + + String result = " where 1=1 "; + + Object taskName = filterMap.get("taskName"); + if(taskName != null && StringUtils.isNotEmpty(taskName.toString())){ + result += " and PROC_NAME like concat('%', '" + taskName.toString() + "', '%') "; + } + + Object taskDate = filterMap.get("taskDate"); + if(taskDate != null && StringUtils.isNotEmpty(taskDate.toString())){ + result += " and PROC_DATE='" + taskDate.toString() + "'"; + } + + Object state = filterMap.get("state"); + if(state != null && StringUtils.isNotEmpty(state.toString())){ + result += " and NOTE='" + state.toString() + "'"; + } + + Object sourceTable = filterMap.get("sourceTable"); + if(sourceTable!= null && StringUtils.isNotEmpty(sourceTable.toString())){ + result += " and SOURCE_TAB like concat('%', '" + sourceTable.toString()+ "', '%')"; + } + + Object targetTable = filterMap.get("targetTable"); + if(sourceTable!= null && StringUtils.isNotEmpty(targetTable.toString())){ + result += " and TARGET_TAB like concat('%', '"+ targetTable.toString()+"', '%') " ; + } + + Object start = filterMap.get("startTime"); + if(start != null && StringUtils.isNotEmpty(start.toString())){ + result += " and STARTDATE>='" + start.toString() + "'"; + } + + Object end = filterMap.get("endTime"); + if(end != null && StringUtils.isNotEmpty(end.toString())){ + result += " and ENDDATE>='" + 
end.toString()+ "'"; + } + return result; + } + + /** + * count task record + * @param filterMap + * @return + */ + public static int countTaskRecord(Map filterMap, String table){ + + int count = 0; + Connection conn = null; + try { + conn = getConn(); + if(conn == null){ + return count; + } + String sql = String.format("select count(1) as count from %s", table); + sql += getWhereString(filterMap); + PreparedStatement pstmt; + pstmt = conn.prepareStatement(sql); + ResultSet rs = pstmt.executeQuery(); + while(rs.next()){ + count = rs.getInt("count"); + break; + } + } catch (SQLException e) { + logger.error("Exception ", e); + }finally { + try { + if(conn != null){ + conn.close(); + } + } catch (SQLException e) { + logger.error("Exception ", e); + } + } + return count; + } + + /** + * query task record by filter map paging + * @param filterMap + * @return + */ + public static List queryAllTaskRecord(Map filterMap , String table) { + + String sql = String.format("select * from %s", table); + sql += getWhereString(filterMap); + + int offset = Integer.parseInt(filterMap.get("offset")); + int pageSize = Integer.parseInt(filterMap.get("pageSize")); + sql += String.format(" order by STARTDATE desc limit %d,%d", offset, pageSize); + + List recordList = new ArrayList<>(); + try{ + recordList = getQueryResult(sql); + }catch (Exception e){ + logger.error("Exception ", e); + } + return recordList; + } + + /** + * convert result set to task record + * @param resultSet + * @return + * @throws SQLException + */ + private static TaskRecord convertToTaskRecord(ResultSet resultSet) throws SQLException { + + TaskRecord taskRecord = new TaskRecord(); + taskRecord.setId(resultSet.getInt("ID")); + taskRecord.setProcId(resultSet.getInt("PROC_ID")); + taskRecord.setProcName(resultSet.getString("PROC_NAME")); + taskRecord.setProcDate(resultSet.getString("PROC_DATE")); + taskRecord.setStartTime(DateUtils.stringToDate(resultSet.getString("STARTDATE"))); + 
taskRecord.setEndTime(DateUtils.stringToDate(resultSet.getString("ENDDATE"))); + taskRecord.setResult(resultSet.getString("RESULT")); + taskRecord.setDuration(resultSet.getInt("DURATION")); + taskRecord.setNote(resultSet.getString("NOTE")); + taskRecord.setSchema(resultSet.getString("SCHEMA")); + taskRecord.setJobId(resultSet.getString("JOB_ID")); + taskRecord.setSourceTab(resultSet.getString("SOURCE_TAB")); + taskRecord.setSourceRowCount(resultSet.getLong("SOURCE_ROW_COUNT")); + taskRecord.setTargetTab(resultSet.getString("TARGET_TAB")); + taskRecord.setTargetRowCount(resultSet.getLong("TARGET_ROW_COUNT")); + taskRecord.setErrorCode(resultSet.getString("ERROR_CODE")); + return taskRecord; + } + + /** + * query task list by select sql + * @param selectSql + * @return + */ + private static List getQueryResult(String selectSql) { + List recordList = new ArrayList<>(); + Connection conn = null; + try { + conn = getConn(); + if(conn == null){ + return recordList; + } + PreparedStatement pstmt; + pstmt = conn.prepareStatement(selectSql); + ResultSet rs = pstmt.executeQuery(); + + while(rs.next()){ + TaskRecord taskRecord = convertToTaskRecord(rs); + recordList.add(taskRecord); + } + } catch (SQLException e) { + logger.error("Exception ", e); + }finally { + try { + if(conn != null){ + conn.close(); + } + } catch (SQLException e) { + logger.error("Exception ", e); + } + } + return recordList; + } + + /** + * according to procname and procdate query task record + * @param procName + * @param procDate + * @return + */ + public static TaskRecordStatus getTaskRecordState(String procName,String procDate){ + String sql = String.format("SELECT * FROM eamp_hive_log_hd WHERE PROC_NAME='%s' and PROC_DATE like '%s'" + ,procName,procDate + "%"); + List taskRecordList = getQueryResult(sql); + + // contains no record and sql exception + if (CollectionUtils.isEmpty(taskRecordList)){ + // exception + return TaskRecordStatus.EXCEPTION; + }else if (taskRecordList.size() > 1){ + return 
TaskRecordStatus.EXCEPTION; + }else { + TaskRecord taskRecord = taskRecordList.get(0); + if (taskRecord == null){ + return TaskRecordStatus.EXCEPTION; + } + Long targetRowCount = taskRecord.getTargetRowCount(); + if (targetRowCount <= 0){ + return TaskRecordStatus.FAILURE; + }else { + return TaskRecordStatus.SUCCESS; + } + + } + } + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/config/MybatisPlusConfig.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/config/MybatisPlusConfig.java new file mode 100644 index 0000000000..340960bb81 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/config/MybatisPlusConfig.java @@ -0,0 +1,17 @@ +package org.apache.dolphinscheduler.dao.config; + +import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; +import org.mybatis.spring.annotation.MapperScan; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + + +@Configuration +@MapperScan("cn.escheduler.*.mapper") +public class MybatisPlusConfig { + @Bean + public PaginationInterceptor paginationInterceptor() { + return new PaginationInterceptor(); + } + +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/config/YmlConfig.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/config/YmlConfig.java new file mode 100644 index 0000000000..0caf7bc826 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/config/YmlConfig.java @@ -0,0 +1,59 @@ +package org.apache.dolphinscheduler.dao.config; + + + +import org.yaml.snakeyaml.*; + +import java.io.InputStream; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * Created by qiaozhanwei on 2019/9/17. 
+ */ +public class YmlConfig { + + private static Map allMap=new HashMap(); + static { + Yaml yaml = new Yaml(); + InputStream inputStream = YmlConfig.class.getResourceAsStream("/application.yml"); + Iterator result = yaml.loadAll(inputStream).iterator(); + while(result.hasNext()){ + Map map=(Map)result.next(); + iteratorYml( map,null); + } + } + + public static void main(String[] args) { + String ss = allMap.get("spring.datasource.url"); + System.out.println(ss); + } + + public static void iteratorYml(Map map,String key){ + Iterator iterator = map.entrySet().iterator(); + while(iterator.hasNext()){ + Map.Entry entry = (Map.Entry) iterator.next(); + Object key2 = entry.getKey(); + Object value = entry.getValue(); + if(value instanceof LinkedHashMap){ + if(key==null){ + iteratorYml((Map)value,key2.toString()); + }else{ + iteratorYml((Map)value,key+"."+key2.toString()); + } + } + if(value instanceof String){ + if(key==null){ + allMap.put(key2.toString(), value.toString()); + } + if(key!=null){ + allMap.put(key+"."+key2.toString(), value.toString()); + } + } + } + + } + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java new file mode 100644 index 0000000000..6737c172e0 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.datasource; + +import com.alibaba.druid.pool.DruidDataSource; +import org.apache.ibatis.mapping.Environment; +import org.apache.ibatis.session.Configuration; +import org.apache.ibatis.session.SqlSession; +import org.apache.ibatis.session.SqlSessionFactory; +import org.apache.ibatis.session.SqlSessionFactoryBuilder; +import org.apache.ibatis.transaction.TransactionFactory; +import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory; +import org.mybatis.spring.SqlSessionTemplate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.sql.DataSource; + + + +/** + * data source connection factory + */ +public class ConnectionFactory { + private static final Logger logger = LoggerFactory.getLogger(ConnectionFactory.class); + + private static SqlSessionFactory sqlSessionFactory; + + /** + * get the data source + */ + public static DruidDataSource getDataSource() { + DruidDataSource druidDataSource = new DruidDataSource(); + + druidDataSource.setDriverClassName("com.mysql.jdbc.Driver"); + druidDataSource.setUrl("jdbc:mysql://192.168.220.188:3306/escheduler?useUnicode=true&characterEncoding=UTF-8"); + druidDataSource.setUsername("root"); + druidDataSource.setPassword("root@123"); + druidDataSource.setInitialSize(5); + druidDataSource.setMinIdle(5); + druidDataSource.setMaxActive(20); + druidDataSource.setMaxWait(60000); + druidDataSource.setTimeBetweenEvictionRunsMillis(60000); + druidDataSource.setMinEvictableIdleTimeMillis(300000); + druidDataSource.setValidationQuery("SELECT 1"); + 
return druidDataSource; + } + + /** + * get sql session factory + */ + public static SqlSessionFactory getSqlSessionFactory() { + if (sqlSessionFactory == null) { + synchronized (ConnectionFactory.class) { + if (sqlSessionFactory == null) { + DataSource dataSource = getDataSource(); + TransactionFactory transactionFactory = new JdbcTransactionFactory(); + + Environment environment = new Environment("development", transactionFactory, dataSource); + + Configuration configuration = new Configuration(environment); + configuration.setLazyLoadingEnabled(true); + configuration.addMappers("cn.escheduler.dao.mapper"); + + + SqlSessionFactoryBuilder builder = new SqlSessionFactoryBuilder(); + sqlSessionFactory = builder.build(configuration); + } + } + } + + return sqlSessionFactory; + } + + /** + * get sql session + */ + public static SqlSession getSqlSession() { + return new SqlSessionTemplate(getSqlSessionFactory()); + } + + public static T getMapper(Class type){ + return getSqlSession().getMapper(type); + } +} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/datasource/DatabaseConfiguration.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DatabaseConfiguration.java similarity index 100% rename from escheduler-dao/src/main/java/cn/escheduler/dao/datasource/DatabaseConfiguration.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DatabaseConfiguration.java diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AccessToken.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AccessToken.java new file mode 100644 index 0000000000..11d50a5b08 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AccessToken.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; + +import java.util.Date; + +@Data +@TableName("t_escheduler_access_token") +public class AccessToken { + + @TableId(value="id", type=IdType.AUTO) + private int id; + + + @TableField("user_id") + private int userId; + + @TableField("token") + private String token; + + @TableField(exist = false) + private String userName; + + @TableField("expire_time") + private Date expireTime; + + @TableField("create_time") + private Date createTime; + + @TableField("update_time") + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getUserId() { + return userId; + } + + public void setUserId(int userId) { + this.userId = userId; + } + + public String getToken() { + return token; + } + + public void setToken(String token) { + this.token = token; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public Date getExpireTime() { + return 
expireTime; + } + + public void setExpireTime(Date expireTime) { + this.expireTime = expireTime; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Alert.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Alert.java new file mode 100644 index 0000000000..f5c56656d7 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Alert.java @@ -0,0 +1,238 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.common.enums.ShowType; + +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +/** + * alert + */ +@Data +@TableName("t_escheduler_alert") +public class Alert { + + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * title + */ + private String title; + + /** + * show type + */ + private ShowType showType; + /** + * content + */ + private String content; + + /** + * alert type + */ + private AlertType alertType; + + + + /** + * alert status + */ + private AlertStatus alertStatus; + + /** + * log + */ + private String log; + + /** + * alert group id + */ + @TableField("alertgroup_id") + private int alertGroupId; + + + /** + * receivers + */ + private String receivers; + + /** + * receivers cc + */ + private String receiversCc; + + /** + * create time + */ + private Date createTime; + + /** + * update time + */ + private Date updateTime; + + + @TableField(exist = false) + private Map info = new HashMap<>(); + + public Map getInfo() { + return info; + } + + public void setInfo(Map info) { + this.info = info; + } + + public Alert() { + } + + public Alert(int id, String title) { + this.id = id; + this.title = title; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public ShowType getShowType() { + return showType; + } + + public void setShowType(ShowType showType) { + this.showType = showType; + } + + 
public String getContent() { + return content; + } + + public void setContent(String content) { + this.content = content; + } + + public AlertType getAlertType() { + return alertType; + } + + public void setAlertType(AlertType alertType) { + this.alertType = alertType; + } + + + public AlertStatus getAlertStatus() { + return alertStatus; + } + + public void setAlertStatus(AlertStatus alertStatus) { + this.alertStatus = alertStatus; + } + + public String getLog() { + return log; + } + + public void setLog(String log) { + this.log = log; + } + + public int getAlertGroupId() { + return alertGroupId; + } + + public void setAlertGroupId(int alertGroupId) { + this.alertGroupId = alertGroupId; + } + + public Date getCreateTime() { + return createTime; + } + + public String getReceivers() { + return receivers; + } + + public void setReceivers(String receivers) { + this.receivers = receivers; + } + + public String getReceiversCc() { + return receiversCc; + } + + public void setReceiversCc(String receiversCc) { + this.receiversCc = receiversCc; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "Alert{" + + "id=" + id + + ", title='" + title + '\'' + + ", showType=" + showType + + ", content='" + content + '\'' + + ", alertType=" + alertType + + ", alertStatus=" + alertStatus + + ", log='" + log + '\'' + + ", alertGroupId=" + alertGroupId + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + ", receivers='" + receivers + '\'' + + ", receiversCc='" + receiversCc + '\'' + + ", info=" + info + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AlertGroup.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AlertGroup.java new file mode 100644 index 
0000000000..62afb4969a --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AlertGroup.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.entity; + +import org.apache.dolphinscheduler.common.enums.AlertType; +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; + +import java.util.Date; + +@Data +@TableName("t_escheduler_alertgroup") +public class AlertGroup { + + + /** + * primary key + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * alert group name + */ + private String groupName; + + /** + * alert group type + */ + private AlertType groupType; + + /** + * alert group description + */ + private String description; + + /** + * create time + */ + private Date createTime; + + /** + * update time + */ + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getGroupName() { + return groupName; + } + + public void setGroupName(String groupName) { + this.groupName = groupName; + } + + 
public AlertType getGroupType() { + return groupType; + } + + public void setGroupType(AlertType groupType) { + this.groupType = groupType; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java new file mode 100644 index 0000000000..2f42dfee0b --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java @@ -0,0 +1,288 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; +import org.apache.dolphinscheduler.common.enums.*; + +import java.util.Date; + +/** + * command + */ +@Data +@TableName("t_escheduler_command") +public class Command { + + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * command type + */ + @TableField("command_type") + private CommandType commandType; + + /** + * process definition id + */ + @TableField("process_definition_id") + private int processDefinitionId; + + /** + * executor id + */ + @TableField("executor_id") + private int executorId; + + /** + * command parameter, format json + */ + @TableField("command_param") + private String commandParam; + + /** + * task depend type + */ + @TableField("task_depend_type") + private TaskDependType taskDependType; + + /** + * failure strategy + */ + @TableField("failure_strategy") + private FailureStrategy failureStrategy; + + /** + * warning type + */ + @TableField("warning_type") + private WarningType warningType; + + /** + * warning group id + */ + @TableField("warning_type") + private Integer warningGroupId; + + /** + * schedule time + */ + @TableField("schedule_time") + private Date scheduleTime; + + /** + * start time + */ + @TableField("start_time") + private Date startTime; + + /** + * process instance priority + */ + @TableField("process_instance_priority") + private Priority processInstancePriority; + + /** + * update time + */ + @TableField("update_time") + private Date updateTime; + + + /** + * + */ + @TableField("worker_group_id;") + private int workerGroupId; + + + public Command() { + this.taskDependType = TaskDependType.TASK_POST; + this.failureStrategy = FailureStrategy.CONTINUE; + this.startTime = new Date(); + 
this.updateTime = new Date(); + } + + public Command( + CommandType commandType, + TaskDependType taskDependType, + FailureStrategy failureStrategy, + int executorId, + int processDefinitionId, + String commandParam, + WarningType warningType, + int warningGroupId, + Date scheduleTime, + Priority processInstancePriority) { + this.commandType = commandType; + this.executorId = executorId; + this.processDefinitionId = processDefinitionId; + this.commandParam = commandParam; + this.warningType = warningType; + this.warningGroupId = warningGroupId; + this.scheduleTime = scheduleTime; + this.taskDependType = taskDependType; + this.failureStrategy = failureStrategy; + this.startTime = new Date(); + this.updateTime = new Date(); + this.processInstancePriority = processInstancePriority; + } + + + public TaskDependType getTaskDependType() { + return taskDependType; + } + + public void setTaskDependType(TaskDependType taskDependType) { + this.taskDependType = taskDependType; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public CommandType getCommandType() { + return commandType; + } + + public void setCommandType(CommandType commandType) { + this.commandType = commandType; + } + + public int getProcessDefinitionId() { + return processDefinitionId; + } + + public void setProcessDefinitionId(int processDefinitionId) { + this.processDefinitionId = processDefinitionId; + } + + + public FailureStrategy getFailureStrategy() { + return failureStrategy; + } + + public void setFailureStrategy(FailureStrategy failureStrategy) { + this.failureStrategy = failureStrategy; + } + + public void setCommandParam(String commandParam) { + this.commandParam = commandParam; + } + + public String getCommandParam() { + return commandParam; + } + + public WarningType getWarningType() { + return warningType; + } + + public void setWarningType(WarningType warningType) { + this.warningType = warningType; + } + + public Integer getWarningGroupId() 
{ + return warningGroupId; + } + + public void setWarningGroupId(Integer warningGroupId) { + this.warningGroupId = warningGroupId; + } + + public Date getScheduleTime() { + return scheduleTime; + } + + public void setScheduleTime(Date scheduleTime) { + this.scheduleTime = scheduleTime; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public int getExecutorId() { + return executorId; + } + + public void setExecutorId(int executorId) { + this.executorId = executorId; + } + + public Priority getProcessInstancePriority() { + return processInstancePriority; + } + + public void setProcessInstancePriority(Priority processInstancePriority) { + this.processInstancePriority = processInstancePriority; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + + public int getWorkerGroupId() { + return workerGroupId; + } + + public void setWorkerGroupId(int workerGroupId) { + this.workerGroupId = workerGroupId; + } + + @Override + public String toString() { + return "Command{" + + "id=" + id + + ", commandType=" + commandType + + ", processDefinitionId=" + processDefinitionId + + ", executorId=" + executorId + + ", commandParam='" + commandParam + '\'' + + ", taskDependType=" + taskDependType + + ", failureStrategy=" + failureStrategy + + ", warningType=" + warningType + + ", warningGroupId=" + warningGroupId + + ", scheduleTime=" + scheduleTime + + ", startTime=" + startTime + + ", processInstancePriority=" + processInstancePriority + + ", updateTime=" + updateTime + + ", workerGroupId=" + workerGroupId + + '}'; + } +} + diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/CommandCount.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/CommandCount.java new file mode 100644 index 0000000000..158169b727 --- /dev/null +++ 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/CommandCount.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.entity; + + +import org.apache.dolphinscheduler.common.enums.CommandType; + +public class CommandCount { + + + /** + * execution state + */ + private CommandType commandType; + + /** + * count for state + */ + private int count; + + + @Override + public String toString(){ + return "command count:" + + " commandType: "+ commandType.toString() + + " count: "+ count; + } + + public CommandType getCommandType() { + return commandType; + } + + public void setCommandType(CommandType commandType) { + this.commandType = commandType; + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/CycleDependency.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/CycleDependency.java new file mode 100644 index 0000000000..e7ad38b6ea --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/CycleDependency.java @@ -0,0 +1,93 
@@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.entity; + +import org.apache.dolphinscheduler.common.enums.CycleEnum; + +import java.util.Date; + +/** + * cycle dependency + */ +public class CycleDependency { + /** + * process define id + */ + private int processDefineId; + /** + * last schedule time + */ + private Date lastScheduleTime; + /** + * expiration time + */ + private Date expirationTime; + /** + * cycle enum + */ + private CycleEnum cycleEnum; + + + public CycleDependency(int processDefineId, Date lastScheduleTime, Date expirationTime, CycleEnum cycleEnum) { + this.processDefineId = processDefineId; + this.lastScheduleTime = lastScheduleTime; + this.expirationTime = expirationTime; + this.cycleEnum = cycleEnum; + } + + public int getProcessDefineId() { + return processDefineId; + } + + public void setProcessDefineId(int processDefineId) { + this.processDefineId = processDefineId; + } + + public Date getLastScheduleTime() { + return lastScheduleTime; + } + + public void setLastScheduleTime(Date lastScheduleTime) { + this.lastScheduleTime = lastScheduleTime; + } + + public Date getExpirationTime() { + return expirationTime; + } + + public void 
setExpirationTime(Date expirationTime) { + this.expirationTime = expirationTime; + } + + public CycleEnum getCycleEnum() { + return cycleEnum; + } + + public void setCycleEnum(CycleEnum cycleEnum) { + this.cycleEnum = cycleEnum; + } + + @Override + public String toString() { + return "CycleDependency{" + + "processDefineId=" + processDefineId + + ", lastScheduleTime=" + lastScheduleTime + + ", expirationTime=" + expirationTime + + ", cycleEnum=" + cycleEnum + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DataSource.java new file mode 100644 index 0000000000..f61b8339e8 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DataSource.java @@ -0,0 +1,196 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + + + +import org.apache.dolphinscheduler.common.enums.DbType; +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; + +import java.util.Date; + +@Data +@TableName("t_escheduler_datasource") +public class DataSource { + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * user id + */ + private int userId; + + /** + * user name + */ + @TableField(exist = false) + private String userName; + + /** + * data source name + */ + private String name; + + /** + * note + */ + private String note; + + /** + * data source type + */ + private DbType type; + + /** + * connection parameters + */ + private String connectionParams; + + /** + * create time + */ + private Date createTime; + + /** + * update time + */ + private Date updateTime; + + public DataSource() { + } + + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getUserId() { + return userId; + } + + public void setUserId(int userId) { + this.userId = userId; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getNote() { + return note; + } + + public void setNote(String note) { + this.note = note; + } + + public DbType getType() { + return type; + } + + public void setType(DbType type) { + this.type = type; + } + + public String getConnectionParams() { + return connectionParams; + } + + public void setConnectionParams(String connectionParams) { + this.connectionParams = connectionParams; + } + + public Date getCreateTime() { + return createTime; + } + + public void 
setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + + @Override + public String toString() { + return "DataSource{" + + "id=" + id + + ", userId=" + userId + + ", userName='" + userName + '\'' + + ", name='" + name + '\'' + + ", note='" + note + '\'' + + ", type=" + type + + ", connectionParams='" + connectionParams + '\'' + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + DataSource that = (DataSource) o; + + if (id != that.id) { + return false; + } + return name.equals(that.name); + + } + + @Override + public int hashCode() { + int result = id; + result = 31 * result + name.hashCode(); + return result; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DatasourceUser.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DatasourceUser.java new file mode 100644 index 0000000000..85331cf8e2 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DatasourceUser.java @@ -0,0 +1,121 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import java.util.Date; + +/** + * data source user relation + */ +@TableName("t_escheduler_relation_datasource_user") +public class DatasourceUser { + + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * user id + */ + private int userId; + + /** + * data source id + */ + private int datasourceId; + + /** + * permission + */ + private int perm; + /** + * create time + */ + private Date createTime; + + /** + * update time + */ + private Date updateTime; + + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getUserId() { + return userId; + } + + public void setUserId(int userId) { + this.userId = userId; + } + + public int getDatasourceId() { + return datasourceId; + } + + public void setDatasourceId(int datasourceId) { + this.datasourceId = datasourceId; + } + + public int getPerm() { + return perm; + } + + public void setPerm(int perm) { + this.perm = perm; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "DatasourceUser{" + + "id=" + id + + ", userId=" + 
userId + + ", datasourceId=" + datasourceId + + ", perm=" + perm + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DefinitionGroupByUser.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DefinitionGroupByUser.java new file mode 100644 index 0000000000..12f1a5fbe4 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DefinitionGroupByUser.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + +/** + * count definition number group by user + */ +public class DefinitionGroupByUser { + + /** + * user name + */ + private String userName; + + /** + * user id + */ + private Integer userId; + + /** + * count number + */ + private int count; + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + public Integer getUserId() { + return userId; + } + + public void setUserId(Integer userId) { + this.userId = userId; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Dependency.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Dependency.java new file mode 100644 index 0000000000..8cac28368e --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Dependency.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + +import org.apache.dolphinscheduler.common.enums.SelfDependStrategy; + +/** + * dependency + */ +public class Dependency { + + /** + * self depend strategy + */ + private SelfDependStrategy self; + + /** + * outer dependency string + */ + private String outer; + + + public Dependency(){} + + public Dependency(String outer, SelfDependStrategy self){ + + this.outer = outer; + this.self = self; + + } + + + public SelfDependStrategy getSelf() { + return self; + } + + public void setSelf(SelfDependStrategy self) { + this.self = self; + } + + public String getOuter() { + return outer; + } + + public void setOuter(String outer) { + this.outer = outer; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java new file mode 100644 index 0000000000..ef92bea7b0 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java @@ -0,0 +1,297 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.dolphinscheduler.dao.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import org.apache.dolphinscheduler.common.enums.*;

import java.util.Date;

/**
 * Snapshot of a {@link Command} that could not be processed, persisted to
 * t_escheduler_error_command together with the failure message so the error
 * can be inspected later.
 */
@TableName("t_escheduler_error_command")
public class ErrorCommand {

    /**
     * id of the original command; IdType.INPUT because the id is carried over
     * from the failed command rather than generated by the database
     */
    @TableId(value = "id", type = IdType.INPUT)
    private int id;

    /**
     * command type
     */
    private CommandType commandType;

    /**
     * process definition id
     */
    private int processDefinitionId;

    /**
     * executor id
     */
    private int executorId;

    /**
     * command parameter, format json
     */
    private String commandParam;

    /**
     * task depend type
     */
    private TaskDependType taskDependType;

    /**
     * failure strategy
     */
    private FailureStrategy failureStrategy;

    /**
     * warning type
     */
    private WarningType warningType;

    /**
     * warning group id
     */
    private Integer warningGroupId;

    /**
     * schedule time
     */
    private Date scheduleTime;

    /**
     * start time
     */
    private Date startTime;

    /**
     * process instance priority
     */
    private Priority processInstancePriority;

    /**
     * update time
     */
    private Date updateTime;

    /**
     * execution message describing why the command failed
     */
    private String message;

    /**
     * worker group id
     */
    private int workerGroupId;

    public ErrorCommand() {
    }

    /**
     * Build an error record from the failed command plus the failure message.
     *
     * @param command the command that failed
     * @param message the failure message
     */
    public ErrorCommand(Command command, String message) {
        this.id = command.getId();
        this.commandType = command.getCommandType();
        this.executorId = command.getExecutorId();
        this.processDefinitionId = command.getProcessDefinitionId();
        this.commandParam = command.getCommandParam();
        this.warningType = command.getWarningType();
        this.warningGroupId = command.getWarningGroupId();
        this.scheduleTime = command.getScheduleTime();
        this.taskDependType = command.getTaskDependType();
        this.failureStrategy = command.getFailureStrategy();
        this.startTime = command.getStartTime();
        this.updateTime = command.getUpdateTime();
        this.processInstancePriority = command.getProcessInstancePriority();
        this.message = message;
        // NOTE(review): workerGroupId is NOT copied from the source command here,
        // so it keeps its default (0). Confirm whether Command carries a worker
        // group id that should be preserved in the error record.
    }

    /**
     * Build an error record field by field; startTime/updateTime default to now.
     */
    public ErrorCommand(
            CommandType commandType,
            TaskDependType taskDependType,
            FailureStrategy failureStrategy,
            int executorId,
            int processDefinitionId,
            String commandParam,
            WarningType warningType,
            int warningGroupId,
            Date scheduleTime,
            Priority processInstancePriority,
            String message) {
        this.commandType = commandType;
        this.executorId = executorId;
        this.processDefinitionId = processDefinitionId;
        this.commandParam = commandParam;
        this.warningType = warningType;
        this.warningGroupId = warningGroupId;
        this.scheduleTime = scheduleTime;
        this.taskDependType = taskDependType;
        this.failureStrategy = failureStrategy;
        this.startTime = new Date();
        this.updateTime = new Date();
        this.processInstancePriority = processInstancePriority;
        this.message = message;
    }

    public TaskDependType getTaskDependType() {
        return taskDependType;
    }

    public void setTaskDependType(TaskDependType taskDependType) {
        this.taskDependType = taskDependType;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public CommandType getCommandType() {
        return commandType;
    }

    public void setCommandType(CommandType commandType) {
        this.commandType = commandType;
    }

    public int getProcessDefinitionId() {
        return processDefinitionId;
    }

    public void setProcessDefinitionId(int processDefinitionId) {
        this.processDefinitionId = processDefinitionId;
    }

    public FailureStrategy getFailureStrategy() {
        return failureStrategy;
    }

    public void setFailureStrategy(FailureStrategy failureStrategy) {
        this.failureStrategy = failureStrategy;
    }

    public void setCommandParam(String commandParam) {
        this.commandParam = commandParam;
    }

    public String getCommandParam() {
        return commandParam;
    }

    public WarningType getWarningType() {
        return warningType;
    }

    public void setWarningType(WarningType warningType) {
        this.warningType = warningType;
    }

    public Integer getWarningGroupId() {
        return warningGroupId;
    }

    public void setWarningGroupId(Integer warningGroupId) {
        this.warningGroupId = warningGroupId;
    }

    public Date getScheduleTime() {
        return scheduleTime;
    }

    public void setScheduleTime(Date scheduleTime) {
        this.scheduleTime = scheduleTime;
    }

    public Date getStartTime() {
        return startTime;
    }

    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    public int getExecutorId() {
        return executorId;
    }

    public void setExecutorId(int executorId) {
        this.executorId = executorId;
    }

    public Priority getProcessInstancePriority() {
        return processInstancePriority;
    }

    public void setProcessInstancePriority(Priority processInstancePriority) {
        this.processInstancePriority = processInstancePriority;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    public int getWorkerGroupId() {
        return workerGroupId;
    }

    public void setWorkerGroupId(int workerGroupId) {
        this.workerGroupId = workerGroupId;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    @Override
    public String toString() {
        // Fixed: previously labelled "Command{" (copy-paste from Command) and
        // omitted workerGroupId even though the field is declared above.
        return "ErrorCommand{"
                + "id=" + id
                + ", commandType=" + commandType
                + ", processDefinitionId=" + processDefinitionId
                + ", executorId=" + executorId
                + ", commandParam='" + commandParam + '\''
                + ", taskDependType=" + taskDependType
                + ", failureStrategy=" + failureStrategy
                + ", warningType=" + warningType
                + ", warningGroupId=" + warningGroupId
                + ", scheduleTime=" + scheduleTime
                + ", startTime=" + startTime
                + ", processInstancePriority=" + processInstancePriority
                + ", updateTime=" + updateTime
                + ", message=" + message
                + ", workerGroupId=" + workerGroupId
                + '}';
    }
}
+ */ +package org.apache.dolphinscheduler.dao.entity; + + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; + +/** + * count execute state + * + */ +public class ExecuteStatusCount { + + /** + * execution state + */ + private ExecutionStatus state; + + /** + * count for state + */ + private int count; + + public ExecutionStatus getExecutionStatus() { + return state; + } + + public void setExecutionStatus(ExecutionStatus executionStatus) { + this.state = executionStatus; + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + @Override + public String toString() { + return "ExecuteStatusCount{" + + "state=" + state + + ", count=" + count + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/MonitorRecord.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/MonitorRecord.java new file mode 100644 index 0000000000..23b17a37d5 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/MonitorRecord.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + +import java.util.Date; + +/** + * monitor record for database + */ +public class MonitorRecord { + + /** + * is normal or not , 1: normal + */ + private int state; + + /** + * max connections + */ + private long maxConnections; + + /** + * max used connections + */ + private long maxUsedConnections; + + /** + * threads connections + */ + private long threadsConnections; + + /** + * threads running connections + */ + private long threadsRunningConnections; + + /** + * start date + */ + private Date date; + + public int getState() { + return state; + } + + public void setState(int state) { + this.state = state; + } + + public long getMaxConnections() { + return maxConnections; + } + + public void setMaxConnections(long maxConnections) { + this.maxConnections = maxConnections; + } + + public long getMaxUsedConnections() { + return maxUsedConnections; + } + + public void setMaxUsedConnections(long maxUsedConnections) { + this.maxUsedConnections = maxUsedConnections; + } + + public long getThreadsConnections() { + return threadsConnections; + } + + public void setThreadsConnections(long threadsConnections) { + this.threadsConnections = threadsConnections; + } + + public long getThreadsRunningConnections() { + return threadsRunningConnections; + } + + public void setThreadsRunningConnections(long threadsRunningConnections) { + this.threadsRunningConnections = threadsRunningConnections; + } + + public Date getDate() { + return date; + } + + public void setDate(Date date) { + this.date = date; + } + + @Override + public String toString() { + return "MonitorRecord{" + + "state=" + state + + ", maxConnections=" + maxConnections + + ", maxUsedConnections=" + maxUsedConnections + + ", threadsConnections=" + threadsConnections + + ", threadsRunningConnections=" + threadsRunningConnections + + ", date=" + date + + '}'; + } +} diff --git 
package org.apache.dolphinscheduler.dao.entity;

import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;

import java.util.List;

/**
 * Process definition json data structure: the task DAG plus its global
 * parameters, timeout and tenant.
 */
public class ProcessData {

    /** task node list forming the process DAG */
    private List<TaskNode> tasks;

    /** user defined global parameters */
    private List<Property> globalParams;

    /** process timeout (same field ProcessDefinition carries as a warning timeout) */
    private int timeout;

    /** tenant id */
    private int tenantId;

    public ProcessData() {
    }

    /**
     * @param tasks        task node list
     * @param globalParams user defined global parameters
     */
    public ProcessData(List<TaskNode> tasks, List<Property> globalParams) {
        this.tasks = tasks;
        this.globalParams = globalParams;
    }

    /**
     * Equality is defined on the task list and the global parameters only;
     * NOTE(review): timeout and tenantId are excluded from the comparison —
     * confirm this is intentional for callers that diff process definitions.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }

        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        ProcessData that = (ProcessData) o;

        return CollectionUtils.equalLists(tasks, that.tasks) &&
                CollectionUtils.equalLists(globalParams, that.globalParams);
    }

    /**
     * hashCode consistent with {@link #equals(Object)} (previously missing —
     * equals without hashCode breaks hash-based collections). equals()
     * delegates list comparison to CollectionUtils.equalLists, whose exact
     * semantics are defined elsewhere, so we conservatively hash only the list
     * sizes: lists equal under any reasonable definition have equal sizes,
     * which keeps the equals/hashCode contract safe.
     */
    @Override
    public int hashCode() {
        int tasksSize = tasks == null ? 0 : tasks.size();
        int paramsSize = globalParams == null ? 0 : globalParams.size();
        return 31 * tasksSize + paramsSize;
    }

    public List<TaskNode> getTasks() {
        return tasks;
    }

    public void setTasks(List<TaskNode> tasks) {
        this.tasks = tasks;
    }

    public List<Property> getGlobalParams() {
        return globalParams;
    }

    public void setGlobalParams(List<Property> globalParams) {
        this.globalParams = globalParams;
    }

    public int getTimeout() {
        return timeout;
    }

    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }

    public int getTenantId() {
        return tenantId;
    }

    public void setTenantId(int tenantId) {
        this.tenantId = tenantId;
    }
}
package org.apache.dolphinscheduler.dao.entity;

import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.process.Property;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import lombok.Data;

import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;


/**
 * Process definition entity mapped to t_escheduler_process_definition.
 * The global parameters are stored as a json string in {@link #globalParams};
 * {@link #globalParamList} and {@link #globalParamMap} are derived,
 * non-persistent views of that string.
 *
 * Note: explicit accessors are kept alongside lombok's {@literal @}Data because
 * setGlobalParams / setGlobalParamList / getGlobalParamMap carry extra
 * json (de)serialization logic that generated accessors would not have.
 */
@Data
@TableName("t_escheduler_process_definition")
public class ProcessDefinition {
    /**
     * id
     */
    @TableId(value = "id", type = IdType.AUTO)
    private int id;

    /**
     * name
     */
    private String name;

    /**
     * version
     */
    private int version;

    /**
     * release state : online/offline
     */
    private ReleaseState releaseState;

    /**
     * project id
     */
    private int projectId;

    /**
     * definition json string
     */
    private String processDefinitionJson;

    /**
     * description
     */
    private String description;

    /**
     * user defined parameters, json string (persisted form)
     */
    private String globalParams;

    /**
     * user defined parameter list, parsed from {@link #globalParams};
     * was a raw List — parameterized to List&lt;Property&gt;
     */
    @TableField(exist = false)
    private List<Property> globalParamList;

    /**
     * user defined parameter map (prop name -> value), lazily built in
     * {@link #getGlobalParamMap()}; was a raw Map — parameterized
     */
    @TableField(exist = false)
    private Map<String, String> globalParamMap;

    /**
     * create time
     */
    private Date createTime;

    /**
     * update time
     */
    private Date updateTime;

    /**
     * process is valid: yes/no
     */
    private Flag flag;

    /**
     * process user id
     */
    private int userId;

    /**
     * user name
     */
    @TableField(exist = false)
    private String userName;

    /**
     * project name
     */
    @TableField(exist = false)
    private String projectName;

    /**
     * locations array for web
     */
    private String locations;

    /**
     * connects array for web
     */
    private String connects;

    /**
     * receivers
     */
    private String receivers;

    /**
     * receivers cc
     */
    private String receiversCc;

    /**
     * schedule release state : online/offline
     */
    @TableField(exist = false)
    private ReleaseState scheduleReleaseState;

    /**
     * process warning time out. unit: minute
     */
    private int timeout;

    /**
     * tenant id
     */
    private int tenantId;


    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getVersion() {
        return version;
    }

    public void setVersion(int version) {
        this.version = version;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public ReleaseState getReleaseState() {
        return releaseState;
    }

    public void setReleaseState(ReleaseState releaseState) {
        this.releaseState = releaseState;
    }

    public String getProcessDefinitionJson() {
        return processDefinitionJson;
    }

    public void setProcessDefinitionJson(String processDefinitionJson) {
        this.processDefinitionJson = processDefinitionJson;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public int getProjectId() {
        return projectId;
    }

    public void setProjectId(int projectId) {
        this.projectId = projectId;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    public Flag getFlag() {
        return flag;
    }

    public void setFlag(Flag flag) {
        this.flag = flag;
    }

    public int getUserId() {
        return userId;
    }

    public void setUserId(int userId) {
        this.userId = userId;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getProjectName() {
        return projectName;
    }

    public void setProjectName(String projectName) {
        this.projectName = projectName;
    }

    public String getGlobalParams() {
        return globalParams;
    }

    /**
     * Stores the json string and eagerly refreshes the parsed list view so the
     * two stay consistent.
     */
    public void setGlobalParams(String globalParams) {
        this.globalParamList = JSONObject.parseArray(globalParams, Property.class);
        this.globalParams = globalParams;
    }

    public List<Property> getGlobalParamList() {
        return globalParamList;
    }

    /**
     * Stores the list and re-serializes it into the persisted json string so
     * the two stay consistent.
     */
    public void setGlobalParamList(List<Property> globalParamList) {
        this.globalParams = JSONObject.toJSONString(globalParamList);
        this.globalParamList = globalParamList;
    }

    /**
     * Lazily builds the prop-name -> value map from the json string on first
     * access; subsequent calls return the cached map.
     */
    public Map<String, String> getGlobalParamMap() {
        List<Property> propList;

        if (globalParamMap == null && StringUtils.isNotEmpty(globalParams)) {
            propList = JSONObject.parseArray(globalParams, Property.class);
            globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
        }

        return globalParamMap;
    }

    public void setGlobalParamMap(Map<String, String> globalParamMap) {
        this.globalParamMap = globalParamMap;
    }

    public String getLocations() {
        return locations;
    }

    public void setLocations(String locations) {
        this.locations = locations;
    }

    public String getConnects() {
        return connects;
    }

    public void setConnects(String connects) {
        this.connects = connects;
    }

    public String getReceivers() {
        return receivers;
    }

    public void setReceivers(String receivers) {
        this.receivers = receivers;
    }

    public String getReceiversCc() {
        return receiversCc;
    }

    public void setReceiversCc(String receiversCc) {
        this.receiversCc = receiversCc;
    }

    public ReleaseState getScheduleReleaseState() {
        return scheduleReleaseState;
    }

    public void setScheduleReleaseState(ReleaseState scheduleReleaseState) {
        this.scheduleReleaseState = scheduleReleaseState;
    }

    public int getTimeout() {
        return timeout;
    }

    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }

    @Override
    public String toString() {
        return "ProcessDefinition{"
                + "id=" + id
                + ", name='" + name + '\''
                + ", version=" + version
                + ", releaseState=" + releaseState
                + ", projectId=" + projectId
                + ", processDefinitionJson='" + processDefinitionJson + '\''
                + ", globalParams='" + globalParams + '\''
                + ", globalParamList=" + globalParamList
                + ", globalParamMap=" + globalParamMap
                + ", createTime=" + createTime
                + ", updateTime=" + updateTime
                + ", flag=" + flag
                + ", userId=" + userId
                + ", userName='" + userName + '\''
                + ", projectName='" + projectName + '\''
                + ", locations='" + locations + '\''
                + ", connects='" + connects + '\''
                + ", receivers='" + receivers + '\''
                + ", receiversCc='" + receiversCc + '\''
                + ", scheduleReleaseState=" + scheduleReleaseState
                + ", timeout=" + timeout
                + ", tenantId=" + tenantId
                + '}';
    }

    public int getTenantId() {
        return tenantId;
    }

    public void setTenantId(int tenantId) {
        this.tenantId = tenantId;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }
}
package org.apache.dolphinscheduler.dao.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import lombok.Data;
import org.apache.dolphinscheduler.common.enums.*;

import java.util.Date;

/**
 * Process instance entity mapped to t_escheduler_process_instance: one run of
 * a process definition, carrying its execution state, the command that
 * triggered it, and scheduling/priority metadata. Fields annotated with
 * {@literal @}TableField(exist = false) are transient view/helper fields not
 * persisted to the table.
 */
@Data
@TableName("t_escheduler_process_instance")
public class ProcessInstance {

    /** id */
    @TableId(value = "id", type = IdType.AUTO)
    private int id;

    /** process definition id */
    private int processDefinitionId;

    /** process state */
    private ExecutionStatus state;

    /** recovery flag for failover */
    private Flag recovery;

    /** start time */
    private Date startTime;

    /** end time */
    private Date endTime;

    /** run time */
    private int runTimes;

    /** name */
    private String name;

    /** host */
    private String host;

    /** process definition structure (not persisted) */
    @TableField(exist = false)
    private ProcessDefinition processDefinition;

    /** process command type */
    private CommandType commandType;

    /** command parameters */
    private String commandParam;

    /** node depend type */
    private TaskDependType taskDependType;

    /** task max try times */
    private int maxTryTimes;

    /** failure strategy when task failed */
    private FailureStrategy failureStrategy;

    /** warning type */
    private WarningType warningType;

    /** warning group */
    private Integer warningGroupId;

    /** schedule time */
    private Date scheduleTime;

    /** command start time */
    private Date commandStartTime;

    /** user define parameters string */
    private String globalParams;

    /** process instance json */
    private String processInstanceJson;

    /** executor id */
    private int executorId;

    /** tenant code (not persisted) */
    @TableField(exist = false)
    private String tenantCode;

    /** queue (not persisted) */
    @TableField(exist = false)
    private String queue;

    /** process is sub process */
    private Flag isSubProcess;

    /** task locations for web */
    private String locations;

    /** task connects for web */
    private String connects;

    /** history command, comma separated — see {@link #addHistoryCmd(CommandType)} */
    private String historyCmd;

    /** depend processes schedule time */
    private String dependenceScheduleTimes;

    /** process duration (not persisted) */
    @TableField(exist = false)
    private Long duration;

    /** process instance priority */
    private Priority processInstancePriority;

    /** worker group id */
    private int workerGroupId;

    /** process timeout for warning */
    private int timeout;

    /** tenant id */
    private int tenantId;

    /** worker group name, for api (not persisted) */
    @TableField(exist = false)
    private String workerGroupName;

    /** receivers for api (not persisted) */
    @TableField(exist = false)
    private String receivers;

    /** receivers cc for api (not persisted) */
    @TableField(exist = false)
    private String receiversCc;

    public ProcessInstance() {

    }

    /**
     * Builds an instance for the given definition, naming it
     * "definitionName-version-currentTimeMillis".
     *
     * @param processDefinition the definition this instance runs
     */
    public ProcessInstance(ProcessDefinition processDefinition) {
        this.processDefinition = processDefinition;
        this.name = processDefinition.getName()
                + "-"
                + processDefinition.getVersion()
                + "-"
                + System.currentTimeMillis();
    }

    public ProcessDefinition getProcessDefinition() {
        return this.processDefinition;
    }

    public void setProcessDefinition(ProcessDefinition processDefinition) {
        this.processDefinition = processDefinition;
    }

    public int getId() {
        return this.id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public int getProcessDefinitionId() {
        return this.processDefinitionId;
    }

    public void setProcessDefinitionId(int processDefinitionId) {
        this.processDefinitionId = processDefinitionId;
    }

    public ExecutionStatus getState() {
        return this.state;
    }

    public void setState(ExecutionStatus state) {
        this.state = state;
    }

    public Flag getRecovery() {
        return this.recovery;
    }

    public void setRecovery(Flag recovery) {
        this.recovery = recovery;
    }

    public Date getStartTime() {
        return this.startTime;
    }

    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    public Date getEndTime() {
        return this.endTime;
    }

    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    public int getRunTimes() {
        return this.runTimes;
    }

    public void setRunTimes(int runTimes) {
        this.runTimes = runTimes;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getHost() {
        return this.host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public CommandType getCommandType() {
        return this.commandType;
    }

    public void setCommandType(CommandType commandType) {
        this.commandType = commandType;
    }

    public String getCommandParam() {
        return this.commandParam;
    }

    public void setCommandParam(String commandParam) {
        this.commandParam = commandParam;
    }

    public TaskDependType getTaskDependType() {
        return this.taskDependType;
    }

    public void setTaskDependType(TaskDependType taskDependType) {
        this.taskDependType = taskDependType;
    }

    public int getMaxTryTimes() {
        return this.maxTryTimes;
    }

    public void setMaxTryTimes(int maxTryTimes) {
        this.maxTryTimes = maxTryTimes;
    }

    public FailureStrategy getFailureStrategy() {
        return this.failureStrategy;
    }

    public void setFailureStrategy(FailureStrategy failureStrategy) {
        this.failureStrategy = failureStrategy;
    }

    /**
     * Whether this instance has reached a finished state.
     * (Name kept as-is — callers depend on the existing capitalization.)
     */
    public boolean IsProcessInstanceStop() {
        return this.state.typeIsFinished();
    }

    public WarningType getWarningType() {
        return this.warningType;
    }

    public void setWarningType(WarningType warningType) {
        this.warningType = warningType;
    }

    public Integer getWarningGroupId() {
        return this.warningGroupId;
    }

    public void setWarningGroupId(Integer warningGroupId) {
        this.warningGroupId = warningGroupId;
    }

    public Date getScheduleTime() {
        return this.scheduleTime;
    }

    public void setScheduleTime(Date scheduleTime) {
        this.scheduleTime = scheduleTime;
    }

    public Date getCommandStartTime() {
        return this.commandStartTime;
    }

    public void setCommandStartTime(Date commandStartTime) {
        this.commandStartTime = commandStartTime;
    }

    public String getGlobalParams() {
        return this.globalParams;
    }

    public void setGlobalParams(String globalParams) {
        this.globalParams = globalParams;
    }

    public String getProcessInstanceJson() {
        return this.processInstanceJson;
    }

    public void setProcessInstanceJson(String processInstanceJson) {
        this.processInstanceJson = processInstanceJson;
    }

    public String getTenantCode() {
        return this.tenantCode;
    }

    public void setTenantCode(String tenantCode) {
        this.tenantCode = tenantCode;
    }

    public String getQueue() {
        return this.queue;
    }

    public void setQueue(String queue) {
        this.queue = queue;
    }

    public int getExecutorId() {
        return this.executorId;
    }

    public void setExecutorId(int executorId) {
        this.executorId = executorId;
    }

    public Flag getIsSubProcess() {
        return this.isSubProcess;
    }

    public void setIsSubProcess(Flag isSubProcess) {
        this.isSubProcess = isSubProcess;
    }

    public Priority getProcessInstancePriority() {
        return this.processInstancePriority;
    }

    public void setProcessInstancePriority(Priority processInstancePriority) {
        this.processInstancePriority = processInstancePriority;
    }

    public String getLocations() {
        return this.locations;
    }

    public void setLocations(String locations) {
        this.locations = locations;
    }

    public String getConnects() {
        return this.connects;
    }

    public void setConnects(String connects) {
        this.connects = connects;
    }

    public String getHistoryCmd() {
        return this.historyCmd;
    }

    public void setHistoryCmd(String historyCmd) {
        this.historyCmd = historyCmd;
    }

    /**
     * Appends a command to the comma-separated history; starts the history if
     * it is currently empty.
     *
     * @param cmd command to record
     */
    public void addHistoryCmd(CommandType cmd) {
        String entry = cmd.toString();
        this.historyCmd = StringUtils.isNotEmpty(this.historyCmd)
                ? String.format("%s,%s", this.historyCmd, entry)
                : entry;
    }

    /**
     * Whether this process was started as a complement-data run, judged from
     * the first entry of the command history.
     */
    public Boolean isComplementData() {
        return StringUtils.isNotEmpty(this.historyCmd)
                && this.historyCmd.startsWith(CommandType.COMPLEMENT_DATA.toString());
    }

    /**
     * Current effective command type: COMPLEMENT_DATA when the history starts
     * with complement data, otherwise the stored command type.
     */
    public CommandType getCmdTypeIfComplement() {
        return isComplementData() ? CommandType.COMPLEMENT_DATA : this.commandType;
    }

    public String getDependenceScheduleTimes() {
        return this.dependenceScheduleTimes;
    }

    public void setDependenceScheduleTimes(String dependenceScheduleTimes) {
        this.dependenceScheduleTimes = dependenceScheduleTimes;
    }

    public Long getDuration() {
        return this.duration;
    }

    public void setDuration(Long duration) {
        this.duration = duration;
    }

    public int getWorkerGroupId() {
        return this.workerGroupId;
    }

    public void setWorkerGroupId(int workerGroupId) {
        this.workerGroupId = workerGroupId;
    }

    public int getTimeout() {
        return this.timeout;
    }

    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }

    public void setTenantId(int tenantId) {
        this.tenantId = tenantId;
    }

    public int getTenantId() {
        return this.tenantId;
    }

    public String getWorkerGroupName() {
        return this.workerGroupName;
    }

    public void setWorkerGroupName(String workerGroupName) {
        this.workerGroupName = workerGroupName;
    }

    public String getReceivers() {
        return this.receivers;
    }

    public void setReceivers(String receivers) {
        this.receivers = receivers;
    }

    public String getReceiversCc() {
        return this.receiversCc;
    }

    public void setReceiversCc(String receiversCc) {
        this.receiversCc = receiversCc;
    }

    @Override
    public String toString() {
        return "ProcessInstance{"
                + "id=" + id
                + ", processDefinitionId=" + processDefinitionId
                + ", state=" + state
                + ", recovery=" + recovery
                + ", startTime=" + startTime
                + ", endTime=" + endTime
                + ", runTimes=" + runTimes
                + ", name='" + name + '\''
                + ", host='" + host + '\''
                + ", processDefinition=" + processDefinition
                + ", commandType=" + commandType
                + ", commandParam='" + commandParam + '\''
                + ", taskDependType=" + taskDependType
                + ", maxTryTimes=" + maxTryTimes
                + ", failureStrategy=" + failureStrategy
                + ", warningType=" + warningType
                + ", warningGroupId=" + warningGroupId
                + ", scheduleTime=" + scheduleTime
                + ", commandStartTime=" + commandStartTime
                + ", globalParams='" + globalParams + '\''
                + ", processInstanceJson='" + processInstanceJson + '\''
                + ", executorId=" + executorId
                + ", tenantCode='" + tenantCode + '\''
                + ", queue='" + queue + '\''
                + ", isSubProcess=" + isSubProcess
                + ", locations='" + locations + '\''
                + ", connects='" + connects + '\''
                + ", historyCmd='" + historyCmd + '\''
                + ", dependenceScheduleTimes='" + dependenceScheduleTimes + '\''
                + ", duration=" + duration
                + ", processInstancePriority=" + processInstancePriority
                + ", workerGroupId=" + workerGroupId
                + ", timeout=" + timeout
                + ", tenantId=" + tenantId
                + ", workerGroupName='" + workerGroupName + '\''
                + ", receivers='" + receivers + '\''
                + ", receiversCc='" + receiversCc + '\''
                + '}';
    }
}
+ */ +package org.apache.dolphinscheduler.dao.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; + +/** + * process instance map + */ +@Data +@TableName("t_escheduler_relation_process_instance") +public class ProcessInstanceMap { + + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * parent process instance id + */ + private int parentProcessInstanceId; + + /** + * parent task instance id + */ + private int parentTaskInstanceId; + + /** + * process instance id + */ + private int processInstanceId; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getParentProcessInstanceId() { + return parentProcessInstanceId; + } + + public void setParentProcessInstanceId(int parentProcessInstanceId) { + this.parentProcessInstanceId = parentProcessInstanceId; + } + + public int getParentTaskInstanceId() { + return parentTaskInstanceId; + } + + public void setParentTaskInstanceId(int parentTaskInstanceId) { + this.parentTaskInstanceId = parentTaskInstanceId; + } + + public int getProcessInstanceId() { + return processInstanceId; + } + + public void setProcessInstanceId(int processInstanceId) { + this.processInstanceId = processInstanceId; + } + + @Override + public String toString() { + return "ProcessInstanceMap{" + + "id=" + id + + ", parentProcessInstanceId=" + parentProcessInstanceId + + ", parentTaskInstanceId=" + parentTaskInstanceId + + ", processInstanceId=" + processInstanceId + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Project.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Project.java new file mode 100644 index 0000000000..3dcaf4490a --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Project.java @@ -0,0 
package org.apache.dolphinscheduler.dao.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

import java.util.Date;

/**
 * Project entity (table t_escheduler_project).
 *
 * Identity (equals/hashCode) is based on {@code id} and {@code name}; the
 * name comparison is null-safe so transient entities whose name has not
 * been set yet can be compared without throwing NullPointerException.
 */
@Data
@TableName("t_escheduler_project")
public class Project {

    /** primary key */
    @TableId(value="id", type=IdType.AUTO)
    private int id;

    /** id of the owning user */
    @TableField("user_id")
    private int userId;

    /** owning user's name (joined for display, not a table column) */
    @TableField(exist=false)
    private String userName;

    /** project name */
    @TableField("name")
    private String name;

    /** project description; the column name is a SQL keyword, hence the backticks */
    @TableField("`desc`")
    private String desc;

    /** create time */
    @TableField("`create_time`")
    private Date createTime;

    /** update time */
    @TableField("`update_time`")
    private Date updateTime;

    /** permission of the querying user on this project (computed, not a column) */
    @TableField(exist=false)
    private int perm;

    /** number of process definitions in this project (computed, not a column) */
    @TableField(exist=false)
    private int defCount;

    /** number of running process instances in this project (computed, not a column) */
    @TableField(exist=false)
    private int instRunningCount;

    public int getDefCount() {
        return defCount;
    }

    public void setDefCount(int defCount) {
        this.defCount = defCount;
    }

    public int getInstRunningCount() {
        return instRunningCount;
    }

    public void setInstRunningCount(int instRunningCount) {
        this.instRunningCount = instRunningCount;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDesc() {
        return desc;
    }

    public void setDesc(String desc) {
        this.desc = desc;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    public int getUserId() {
        return userId;
    }

    public void setUserId(int userId) {
        this.userId = userId;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public int getPerm() {
        return perm;
    }

    public void setPerm(int perm) {
        this.perm = perm;
    }

    @Override
    public String toString() {
        return "Project{" +
                "id=" + id +
                ", userId=" + userId +
                ", userName='" + userName + '\'' +
                ", name='" + name + '\'' +
                ", desc='" + desc + '\'' +
                ", createTime=" + createTime +
                ", updateTime=" + updateTime +
                '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        Project project = (Project) o;

        if (id != project.id) {
            return false;
        }
        // null-safe: name may be unset on a transient entity
        return name != null ? name.equals(project.name) : project.name == null;
    }

    @Override
    public int hashCode() {
        int result = id;
        // null-safe, consistent with equals()
        result = 31 * result + (name != null ? name.hashCode() : 0);
        return result;
    }
}
package org.apache.dolphinscheduler.dao.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

import java.util.Date;

/**
 * Relation entity granting a user a permission level on a project
 * (table t_escheduler_relation_project_user).
 */
@Data
@TableName("t_escheduler_relation_project_user")
public class ProjectUser {

    /** primary key */
    @TableId(value = "id", type = IdType.AUTO)
    private int id;

    /** id of the granted user */
    @TableField("user_id")
    private int userId;

    /** id of the project the grant applies to */
    @TableField("project_id")
    private int projectId;

    /** project name (joined for display, not a table column) */
    @TableField(exist = false)
    private String projectName;

    /** user name (joined for display, not a table column) */
    @TableField(exist = false)
    private String userName;

    /** permission level granted to the user */
    private int perm;

    /** record creation time */
    @TableField("create_time")
    private Date createTime;

    /** record last-update time */
    @TableField("update_time")
    private Date updateTime;

    /** @return primary key */
    public int getId() {
        return id;
    }

    /** @param id primary key */
    public void setId(int id) {
        this.id = id;
    }

    /** @return granted user id */
    public int getUserId() {
        return userId;
    }

    /** @param userId granted user id */
    public void setUserId(int userId) {
        this.userId = userId;
    }

    /** @return project id */
    public int getProjectId() {
        return projectId;
    }

    /** @param projectId project id */
    public void setProjectId(int projectId) {
        this.projectId = projectId;
    }

    /** @return record creation time */
    public Date getCreateTime() {
        return createTime;
    }

    /** @param createTime record creation time */
    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    /** @return record last-update time */
    public Date getUpdateTime() {
        return updateTime;
    }

    /** @param updateTime record last-update time */
    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    /** @return joined project name */
    public String getProjectName() {
        return projectName;
    }

    /** @param projectName joined project name */
    public void setProjectName(String projectName) {
        this.projectName = projectName;
    }

    /** @return joined user name */
    public String getUserName() {
        return userName;
    }

    /** @param userName joined user name */
    public void setUserName(String userName) {
        this.userName = userName;
    }

    /** @return permission level */
    public int getPerm() {
        return perm;
    }

    /** @param perm permission level */
    public void setPerm(int perm) {
        this.perm = perm;
    }

    @Override
    public String toString() {
        return "ProjectUser{"
                + "id=" + id
                + ", projectId=" + projectId
                + ", projectName='" + projectName + '\''
                + ", userId=" + userId
                + ", userName='" + userName + '\''
                + ", perm=" + perm
                + ", createTime=" + createTime
                + ", updateTime=" + updateTime
                + '}';
    }
}
package org.apache.dolphinscheduler.dao.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

import java.util.Date;

/**
 * Queue entity mapping a scheduler queue name to a YARN queue
 * (table t_escheduler_queue).
 */
@Data
@TableName("t_escheduler_queue")
public class Queue {

    /** primary key */
    @TableId(value = "id", type = IdType.AUTO)
    private int id;

    /** display name of the queue inside the scheduler */
    private String queueName;

    /** name of the underlying YARN queue */
    private String queue;

    /** record creation time */
    private Date createTime;

    /** record last-update time */
    private Date updateTime;

    /** @return primary key */
    public int getId() {
        return id;
    }

    /** @param id primary key */
    public void setId(int id) {
        this.id = id;
    }

    /** @return scheduler-side queue name */
    public String getQueueName() {
        return queueName;
    }

    /** @param queueName scheduler-side queue name */
    public void setQueueName(String queueName) {
        this.queueName = queueName;
    }

    /** @return YARN queue name */
    public String getQueue() {
        return queue;
    }

    /** @param queue YARN queue name */
    public void setQueue(String queue) {
        this.queue = queue;
    }

    /** @return record creation time */
    public Date getCreateTime() {
        return createTime;
    }

    /** @param createTime record creation time */
    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    /** @return record last-update time */
    public Date getUpdateTime() {
        return updateTime;
    }

    /** @param updateTime record last-update time */
    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    @Override
    public String toString() {
        return "Queue{"
                + "id=" + id
                + ", queueName='" + queueName + '\''
                + ", queue='" + queue + '\''
                + ", createTime=" + createTime
                + ", updateTime=" + updateTime
                + '}';
    }
}
package org.apache.dolphinscheduler.dao.entity;


import org.apache.dolphinscheduler.common.enums.ResourceType;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

import java.util.Date;

/**
 * Resource entity (table t_escheduler_resources): a file (or UDF jar)
 * uploaded by a user.
 *
 * Identity (equals/hashCode) is based on {@code id} and {@code alias}; the
 * alias comparison is null-safe so transient entities whose alias has not
 * been set yet can be compared without throwing NullPointerException.
 */
@Data
@TableName("t_escheduler_resources")
public class Resource {
    /** primary key */
    @TableId(value="id", type=IdType.AUTO)
    private int id;

    /** resource alias (display name) */
    private String alias;

    /** description */
    private String desc;

    /** original file name of the upload */
    private String fileName;

    /** id of the uploading user */
    private int userId;

    /** resource type (e.g. FILE / UDF) */
    private ResourceType type;

    /** resource size in bytes */
    private long size;

    /** create time */
    private Date createTime;

    /** update time */
    private Date updateTime;

    public Resource() {
    }

    /**
     * Full constructor including the database id.
     */
    public Resource(int id, String alias, String fileName, String desc, int userId,
                    ResourceType type, long size,
                    Date createTime, Date updateTime) {
        this.id = id;
        this.alias = alias;
        this.fileName = fileName;
        this.desc = desc;
        this.userId = userId;
        this.type = type;
        this.size = size;
        this.createTime = createTime;
        this.updateTime = updateTime;
    }

    /**
     * Constructor for a not-yet-persisted resource (id assigned by the database).
     */
    public Resource(String alias, String fileName, String desc, int userId, ResourceType type, long size, Date createTime, Date updateTime) {
        this.alias = alias;
        this.fileName = fileName;
        this.desc = desc;
        this.userId = userId;
        this.type = type;
        this.size = size;
        this.createTime = createTime;
        this.updateTime = updateTime;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getAlias() {
        return alias;
    }

    public void setAlias(String alias) {
        this.alias = alias;
    }

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public String getDesc() {
        return desc;
    }

    public void setDesc(String desc) {
        this.desc = desc;
    }

    public int getUserId() {
        return userId;
    }

    public void setUserId(int userId) {
        this.userId = userId;
    }

    public ResourceType getType() {
        return type;
    }

    public void setType(ResourceType type) {
        this.type = type;
    }

    public long getSize() {
        return size;
    }

    public void setSize(long size) {
        this.size = size;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    @Override
    public String toString() {
        return "Resource{" +
                "id=" + id +
                ", alias='" + alias + '\'' +
                ", fileName='" + fileName + '\'' +
                ", desc='" + desc + '\'' +
                ", userId=" + userId +
                ", type=" + type +
                ", size=" + size +
                ", createTime=" + createTime +
                ", updateTime=" + updateTime +
                '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        Resource resource = (Resource) o;

        if (id != resource.id) {
            return false;
        }
        // null-safe: alias may be unset on a transient entity
        return alias != null ? alias.equals(resource.alias) : resource.alias == null;
    }

    @Override
    public int hashCode() {
        int result = id;
        // null-safe, consistent with equals()
        result = 31 * result + (alias != null ? alias.hashCode() : 0);
        return result;
    }
}
package org.apache.dolphinscheduler.dao.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

import java.util.Date;

/**
 * Relation entity granting a user a permission level on a resource
 * (table t_escheduler_relation_resources_user).
 */
@Data
@TableName("t_escheduler_relation_resources_user")
public class ResourcesUser {

    /** primary key */
    @TableId(value = "id", type = IdType.AUTO)
    private int id;

    /** id of the granted user */
    private int userId;

    /** id of the resource the grant applies to */
    private int resourcesId;

    /** permission level granted to the user */
    private int perm;

    /** record creation time */
    private Date createTime;

    /** record last-update time */
    private Date updateTime;

    /** @return primary key */
    public int getId() {
        return id;
    }

    /** @param id primary key */
    public void setId(int id) {
        this.id = id;
    }

    /** @return granted user id */
    public int getUserId() {
        return userId;
    }

    /** @param userId granted user id */
    public void setUserId(int userId) {
        this.userId = userId;
    }

    /** @return resource id */
    public int getResourcesId() {
        return resourcesId;
    }

    /** @param resourcesId resource id */
    public void setResourcesId(int resourcesId) {
        this.resourcesId = resourcesId;
    }

    /** @return permission level */
    public int getPerm() {
        return perm;
    }

    /** @param perm permission level */
    public void setPerm(int perm) {
        this.perm = perm;
    }

    /** @return record creation time */
    public Date getCreateTime() {
        return createTime;
    }

    /** @param createTime record creation time */
    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    /** @return record last-update time */
    public Date getUpdateTime() {
        return updateTime;
    }

    /** @param updateTime record last-update time */
    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    @Override
    public String toString() {
        return "ResourcesUser{"
                + "id=" + id
                + ", userId=" + userId
                + ", resourcesId=" + resourcesId
                + ", perm=" + perm
                + ", createTime=" + createTime
                + ", updateTime=" + updateTime
                + '}';
    }
}
package org.apache.dolphinscheduler.dao.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.WarningType;

import java.util.Date;

/**
 * Schedule entity (table t_escheduler_schedules): a crontab-driven trigger
 * bound to a process definition.
 */
@Data
@TableName("t_escheduler_schedules")
public class Schedule {

    /** primary key */
    @TableId(value="id", type=IdType.AUTO)
    private int id;

    /** id of the scheduled process definition */
    private int processDefinitionId;

    /** process definition name (joined for display, not a table column) */
    @TableField(exist = false)
    private String processDefinitionName;

    /** project name (joined for display, not a table column) */
    @TableField(exist = false)
    private String projectName;

    /** description of the scheduled definition (joined, not a table column) */
    @TableField(exist = false)
    private String definitionDescription;

    /** first instant the schedule may fire */
    private Date startTime;

    /** last instant the schedule may fire */
    private Date endTime;

    /** crontab expression driving the schedule */
    private String crontab;

    /** failure strategy applied to triggered instances */
    private FailureStrategy failureStrategy;

    /** warning type for triggered instances */
    private WarningType warningType;

    /** record creation time */
    private Date createTime;

    /** record last-update time */
    private Date updateTime;

    /** id of the creating user */
    private int userId;

    /** creating user's name (joined, not a table column) */
    @TableField(exist = false)
    private String userName;

    /** whether the schedule is online or offline */
    private ReleaseState releaseState;

    /** id of the warning group to notify */
    private int warningGroupId;

    /** priority assigned to triggered process instances */
    private Priority processInstancePriority;

    /** id of the worker group that runs triggered instances */
    private int workerGroupId;

    public Schedule() {
    }

    public int getWarningGroupId() {
        return warningGroupId;
    }

    public void setWarningGroupId(int warningGroupId) {
        this.warningGroupId = warningGroupId;
    }

    public String getProjectName() {
        return projectName;
    }

    public void setProjectName(String projectName) {
        this.projectName = projectName;
    }

    public Date getStartTime() {
        return startTime;
    }

    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    public Date getEndTime() {
        return endTime;
    }

    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    public String getCrontab() {
        return crontab;
    }

    public void setCrontab(String crontab) {
        this.crontab = crontab;
    }

    public FailureStrategy getFailureStrategy() {
        return failureStrategy;
    }

    public void setFailureStrategy(FailureStrategy failureStrategy) {
        this.failureStrategy = failureStrategy;
    }

    public WarningType getWarningType() {
        return warningType;
    }

    public void setWarningType(WarningType warningType) {
        this.warningType = warningType;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public ReleaseState getReleaseState() {
        return releaseState;
    }

    public void setReleaseState(ReleaseState releaseState) {
        this.releaseState = releaseState;
    }

    public int getProcessDefinitionId() {
        return processDefinitionId;
    }

    public void setProcessDefinitionId(int processDefinitionId) {
        this.processDefinitionId = processDefinitionId;
    }

    public String getProcessDefinitionName() {
        return processDefinitionName;
    }

    public void setProcessDefinitionName(String processDefinitionName) {
        this.processDefinitionName = processDefinitionName;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    public int getUserId() {
        return userId;
    }

    public void setUserId(int userId) {
        this.userId = userId;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public Priority getProcessInstancePriority() {
        return processInstancePriority;
    }

    public void setProcessInstancePriority(Priority processInstancePriority) {
        this.processInstancePriority = processInstancePriority;
    }

    public int getWorkerGroupId() {
        return workerGroupId;
    }

    public void setWorkerGroupId(int workerGroupId) {
        this.workerGroupId = workerGroupId;
    }

    public String getDefinitionDescription() {
        return definitionDescription;
    }

    public void setDefinitionDescription(String definitionDescription) {
        this.definitionDescription = definitionDescription;
    }

    @Override
    public String toString() {
        return "Schedule{" +
                "id=" + id +
                ", processDefinitionId=" + processDefinitionId +
                ", processDefinitionName='" + processDefinitionName + '\'' +
                ", projectName='" + projectName + '\'' +
                // label matches the field name, consistent with the other entities
                ", definitionDescription='" + definitionDescription + '\'' +
                ", startTime=" + startTime +
                ", endTime=" + endTime +
                ", crontab='" + crontab + '\'' +
                ", failureStrategy=" + failureStrategy +
                ", warningType=" + warningType +
                ", createTime=" + createTime +
                ", updateTime=" + updateTime +
                ", userId=" + userId +
                ", userName='" + userName + '\'' +
                ", releaseState=" + releaseState +
                ", warningGroupId=" + warningGroupId +
                ", processInstancePriority=" + processInstancePriority +
                ", workerGroupId=" + workerGroupId +
                '}';
    }
}
package org.apache.dolphinscheduler.dao.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

import java.util.Date;

/**
 * Login session entity (table t_escheduler_session). The primary key is a
 * String session token rather than an auto-incremented integer.
 */
@Data
@TableName("t_escheduler_session")
public class Session {

    /** session token (primary key) */
    @TableId(value = "id", type = IdType.AUTO)
    private String id;

    /** id of the logged-in user */
    private int userId;

    /** time of the user's last login */
    private Date lastLoginTime;

    /** ip address the user logged in from */
    private String ip;

    /** @return session token */
    public String getId() {
        return id;
    }

    /** @param id session token */
    public void setId(String id) {
        this.id = id;
    }

    /** @return logged-in user id */
    public int getUserId() {
        return userId;
    }

    /** @param userId logged-in user id */
    public void setUserId(int userId) {
        this.userId = userId;
    }

    /** @return login ip address */
    public String getIp() {
        return ip;
    }

    /** @param ip login ip address */
    public void setIp(String ip) {
        this.ip = ip;
    }

    /** @return last login time */
    public Date getLastLoginTime() {
        return lastLoginTime;
    }

    /** @param lastLoginTime last login time */
    public void setLastLoginTime(Date lastLoginTime) {
        this.lastLoginTime = lastLoginTime;
    }

    @Override
    public String toString() {
        return "Session{"
                + "id=" + id
                + ", userId=" + userId
                + ", ip='" + ip + '\''
                + ", lastLoginTime=" + lastLoginTime
                + '}';
    }
}
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.entity; + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; + +import java.util.Date; + +/** + * task instance + */ +@Data +@TableName("t_escheduler_task_instance") +public class TaskInstance { + + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * task name + */ + private String name; + + /** + * task type + */ + private String taskType; + + /** + * process definition id + */ + private int processDefinitionId; + + /** + * process instance id + */ + private int processInstanceId; + + /** + * process instance name + */ + private String processInstanceName; + + /** + * task json + */ + private String taskJson; + + /** + * state + */ + private ExecutionStatus state; + + /** + * task submit time + */ + private Date submitTime; + + /** + * task start time + */ + private Date startTime; + + /** + * task end time + */ + private Date 
endTime; + + /** + * task host + */ + private String host; + + /** + * task shell execute path and the resource down from hdfs + * default path: $base_run_dir/processInstanceId/taskInstanceId/retryTimes + */ + private String executePath; + + /** + * task log path + * default path: $base_run_dir/processInstanceId/taskInstanceId/retryTimes + */ + private String logPath; + + /** + * retry times + */ + private int retryTimes; + + /** + * alert flag + */ + private Flag alertFlag; + + /** + * run flag + */ + private Flag runFlag; + + /** + * process instance + */ + private ProcessInstance processInstance; + + /** + * process definition + */ + private ProcessDefinition processDefine; + + /** + * process id + */ + private int pid; + + /** + * appLink + */ + private String appLink; + + /** + * flag + */ + private Flag flag; + + /** + * dependency + */ + private String dependency; + + /** + * duration + * @return + */ + private Long duration; + + /** + * max retry times + * @return + */ + private int maxRetryTimes; + + /** + * task retry interval, unit: minute + * @return + */ + private int retryInterval; + + /** + * task intance priority + */ + private Priority taskInstancePriority; + + /** + * process intance priority + */ + private Priority processInstancePriority; + + /** + * dependent state + * @return + */ + private String dependentResult; + + + /** + * worker group id + * @return + */ + private int workerGroupId; + + + + public void init(String host,Date startTime,String executePath){ + this.host = host; + this.startTime = startTime; + this.executePath = executePath; + } + + + public ProcessInstance getProcessInstance() { + return processInstance; + } + + public void setProcessInstance(ProcessInstance processInstance) { + this.processInstance = processInstance; + } + + public ProcessDefinition getProcessDefine() { + return processDefine; + } + + public void setProcessDefine(ProcessDefinition processDefine) { + this.processDefine = processDefine; + } + + public int 
getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getTaskType() { + return taskType; + } + + public void setTaskType(String taskType) { + this.taskType = taskType; + } + + public int getProcessDefinitionId() { + return processDefinitionId; + } + + public void setProcessDefinitionId(int processDefinitionId) { + this.processDefinitionId = processDefinitionId; + } + + public int getProcessInstanceId() { + return processInstanceId; + } + + public void setProcessInstanceId(int processInstanceId) { + this.processInstanceId = processInstanceId; + } + + public String getTaskJson() { + return taskJson; + } + + public void setTaskJson(String taskJson) { + this.taskJson = taskJson; + } + + public ExecutionStatus getState() { + return state; + } + + public void setState(ExecutionStatus state) { + this.state = state; + } + + public Date getSubmitTime() { + return submitTime; + } + + public void setSubmitTime(Date submitTime) { + this.submitTime = submitTime; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public Date getEndTime() { + return endTime; + } + + public void setEndTime(Date endTime) { + this.endTime = endTime; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public String getExecutePath() { + return executePath; + } + + public void setExecutePath(String executePath) { + this.executePath = executePath; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + + public Flag getAlertFlag() { + return alertFlag; + } + + public void setAlertFlag(Flag alertFlag) { + this.alertFlag = alertFlag; + } + + public int getRetryTimes() { + return retryTimes; + } + + public void 
setRetryTimes(int retryTimes) { + this.retryTimes = retryTimes; + } + + public Boolean isTaskSuccess(){ + return this.state == ExecutionStatus.SUCCESS; + } + + public int getPid() { + return pid; + } + + public void setPid(int pid) { + this.pid = pid; + } + + public String getAppLink() { + return appLink; + } + + public void setAppLink(String appLink) { + this.appLink = appLink; + } + + + public Boolean isSubProcess(){ + return TaskType.SUB_PROCESS.toString().equals(this.taskType.toUpperCase()); + } + + public String getDependency(){ + + if(this.dependency != null){ + return this.dependency; + } + TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class); + + return taskNode.getDependence(); + } + + public Flag getFlag() { + return flag; + } + + public void setFlag(Flag flag) { + this.flag = flag; + } + public String getProcessInstanceName() { + return processInstanceName; + } + + public void setProcessInstanceName(String processInstanceName) { + this.processInstanceName = processInstanceName; + } + + public Flag getRunFlag() { + return runFlag; + } + + public void setRunFlag(Flag runFlag) { + this.runFlag = runFlag; + } + + public Long getDuration() { + return duration; + } + + public void setDuration(Long duration) { + this.duration = duration; + } + + public int getMaxRetryTimes() { + return maxRetryTimes; + } + + public void setMaxRetryTimes(int maxRetryTimes) { + this.maxRetryTimes = maxRetryTimes; + } + + public int getRetryInterval() { + return retryInterval; + } + + public void setRetryInterval(int retryInterval) { + this.retryInterval = retryInterval; + } + + public Boolean isTaskComplete() { + + return this.getState().typeIsPause() + || this.getState().typeIsSuccess() + || this.getState().typeIsCancel() + || (this.getState().typeIsFailure() && !taskCanRetry()); + } + /** + * 判断是否可以重试 + * @return + */ + public boolean taskCanRetry() { + if(this.isSubProcess()){ + return false; + } + if(this.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE){ + 
return true; + }else { + return (this.getState().typeIsFailure() + && this.getRetryTimes() < this.getMaxRetryTimes()); + } + } + + public void setDependency(String dependency) { + this.dependency = dependency; + } + + public Priority getTaskInstancePriority() { + return taskInstancePriority; + } + + public void setTaskInstancePriority(Priority taskInstancePriority) { + this.taskInstancePriority = taskInstancePriority; + } + + public Priority getProcessInstancePriority() { + return processInstancePriority; + } + + public void setProcessInstancePriority(Priority processInstancePriority) { + this.processInstancePriority = processInstancePriority; + } + + public int getWorkerGroupId() { + return workerGroupId; + } + + public void setWorkerGroupId(int workerGroupId) { + this.workerGroupId = workerGroupId; + } + + @Override + public String toString() { + return "TaskInstance{" + + "id=" + id + + ", name='" + name + '\'' + + ", taskType='" + taskType + '\'' + + ", processDefinitionId=" + processDefinitionId + + ", processInstanceId=" + processInstanceId + + ", processInstanceName='" + processInstanceName + '\'' + + ", taskJson='" + taskJson + '\'' + + ", state=" + state + + ", submitTime=" + submitTime + + ", startTime=" + startTime + + ", endTime=" + endTime + + ", host='" + host + '\'' + + ", executePath='" + executePath + '\'' + + ", logPath='" + logPath + '\'' + + ", retryTimes=" + retryTimes + + ", alertFlag=" + alertFlag + + ", runFlag=" + runFlag + + ", processInstance=" + processInstance + + ", processDefine=" + processDefine + + ", pid=" + pid + + ", appLink='" + appLink + '\'' + + ", flag=" + flag + + ", dependency=" + dependency + + ", duration=" + duration + + ", maxRetryTimes=" + maxRetryTimes + + ", retryInterval=" + retryInterval + + ", taskInstancePriority=" + taskInstancePriority + + ", processInstancePriority=" + processInstancePriority + + ", workGroupId=" + workerGroupId + + '}'; + } + + public String getDependentResult() { + return dependentResult; + 
} + + public void setDependentResult(String dependentResult) { + this.dependentResult = dependentResult; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskRecord.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskRecord.java new file mode 100644 index 0000000000..9a6979525b --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskRecord.java @@ -0,0 +1,256 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Task record for qianfan: one row of an externally recorded task execution,
 * carrying timing, row-count and result information.
 */
public class TaskRecord {

    /** record id */
    private int id;

    /** process id */
    private int procId;

    /** procedure name */
    private String procName;

    /** procedure date */
    private String procDate;

    /** start date */
    private Date startTime;

    /** end date */
    private Date endTime;

    /** result */
    private String result;

    /** duration unit: second */
    private int duration;

    /** note */
    private String note;

    /** schema */
    private String schema;

    /** job id */
    private String jobId;

    /** source tab */
    private String sourceTab;

    /** source row count */
    private Long sourceRowCount;

    /** target tab */
    private String targetTab;

    /** target row count */
    private Long targetRowCount;

    /** error code */
    private String errorCode;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public int getProcId() {
        return procId;
    }

    public void setProcId(int procId) {
        this.procId = procId;
    }

    public String getProcName() {
        return procName;
    }

    public void setProcName(String procName) {
        this.procName = procName;
    }

    public String getProcDate() {
        return procDate;
    }

    public void setProcDate(String procDate) {
        this.procDate = procDate;
    }

    public Date getStartTime() {
        return startTime;
    }

    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    public Date getEndTime() {
        return endTime;
    }

    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    public String getResult() {
        return result;
    }

    public void setResult(String result) {
        this.result = result;
    }

    public int getDuration() {
        return duration;
    }

    public void setDuration(int duration) {
        this.duration = duration;
    }

    public String getNote() {
        return note;
    }

    public void setNote(String note) {
        this.note = note;
    }

    public String getSchema() {
        return schema;
    }

    public void setSchema(String schema) {
        this.schema = schema;
    }

    public String getJobId() {
        return jobId;
    }

    public void setJobId(String jobId) {
        this.jobId = jobId;
    }

    public String getSourceTab() {
        return sourceTab;
    }

    public void setSourceTab(String sourceTab) {
        this.sourceTab = sourceTab;
    }

    public Long getSourceRowCount() {
        return sourceRowCount;
    }

    public void setSourceRowCount(Long sourceRowCount) {
        this.sourceRowCount = sourceRowCount;
    }

    public String getTargetTab() {
        return targetTab;
    }

    public void setTargetTab(String targetTab) {
        this.targetTab = targetTab;
    }

    public Long getTargetRowCount() {
        return targetRowCount;
    }

    public void setTargetRowCount(Long targetRowCount) {
        this.targetRowCount = targetRowCount;
    }

    public String getErrorCode() {
        return errorCode;
    }

    public void setErrorCode(String errorCode) {
        this.errorCode = errorCode;
    }

    @Override
    public String toString() {
        // builds the same flat "label:value" summary as before, via a builder
        StringBuilder sb = new StringBuilder("task record, id:");
        sb.append(id)
                .append(" proc id:").append(procId)
                .append(" proc name:").append(procName)
                .append(" proc date: ").append(procDate)
                .append(" start date:").append(startTime)
                .append(" end date:").append(endTime)
                .append(" result : ").append(result)
                .append(" duration : ").append(duration)
                .append(" note : ").append(note)
                .append(" schema : ").append(schema)
                .append(" job id : ").append(jobId)
                .append(" source table : ").append(sourceTab)
                .append(" source row count: ").append(sourceRowCount)
                .append(" target table : ").append(targetTab)
                .append(" target row count: ").append(targetRowCount)
                .append(" error code: ").append(errorCode);
        return sb.toString();
    }
}
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Tenant.java @@ -0,0 +1,167 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; + +import java.util.Date; + +/** + * tenant + */ +@Data +@TableName("t_escheduler_tenant") +public class Tenant { + + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * tenant code + */ + private String tenantCode; + + /** + * tenant name + */ + private String tenantName; + + /** + * description + */ + private String description; + + /** + * queue id + */ + private int queueId; + + /** + * queue name + */ + @TableField(exist = false) + private String queueName; + + /** + * queue + */ + @TableField(exist = false) + private String queue; + + /** + * create time + */ + private Date createTime; + /** + * update time + */ + private Date updateTime; + + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + 
public String getTenantCode() { + return tenantCode; + } + + public void setTenantCode(String tenantCode) { + this.tenantCode = tenantCode; + } + + public String getTenantName() { + return tenantName; + } + + public void setTenantName(String tenantName) { + this.tenantName = tenantName; + } + + public int getQueueId() { + return queueId; + } + + public void setQueueId(int queueId) { + this.queueId = queueId; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public String getQueueName() { + return queueName; + } + + public void setQueueName(String queueName) { + this.queueName = queueName; + } + + public String getQueue() { + return queue; + } + + public void setQueue(String queue) { + this.queue = queue; + } + + @Override + public String toString() { + return "Tenant{" + + "id=" + id + + ", tenantCode='" + tenantCode + '\'' + + ", tenantName='" + tenantName + '\'' + + ", queueId=" + queueId + + ", queueName='" + queueName + '\'' + + ", queue='" + queue + '\'' + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UDFUser.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UDFUser.java new file mode 100644 index 0000000000..c89008e915 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UDFUser.java @@ -0,0 +1,123 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; + +import java.util.Date; + +/** + * udf user relation + */ +@Data +@TableName("t_escheduler_relation_udfs_user") +public class UDFUser { + + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * id + */ + private int userId; + + /** + * udf id + */ + private int udfId; + + /** + * permission + */ + private int perm; + + /** + * create time + */ + private Date createTime; + + /** + * update time + */ + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getUserId() { + return userId; + } + + public void setUserId(int userId) { + this.userId = userId; + } + + public int getUdfId() { + return udfId; + } + + public void setUdfId(int udfId) { + this.udfId = udfId; + } + + public int getPerm() { + return perm; + } + + public void setPerm(int perm) { + this.perm = perm; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date 
getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "UDFUser{" + + "id=" + id + + ", userId=" + userId + + ", udfId=" + udfId + + ", perm=" + perm + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java new file mode 100644 index 0000000000..a42492e094 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java @@ -0,0 +1,232 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + +import org.apache.dolphinscheduler.common.enums.UdfType; +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; + +import java.util.Date; + +/** + * udf function + */ +@Data +@TableName("t_escheduler_udfs") +public class UdfFunc { + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + /** + * user id + */ + private int userId; + + /** + * udf function name + */ + private String funcName; + + /** + * udf class name + */ + private String className; + + /** + * udf argument types + */ + private String argTypes; + + /** + * udf data base + */ + private String database; + + /** + * udf description + */ + private String desc; + + /** + * resource id + */ + private int resourceId; + + /** + * resource name + */ + private String resourceName; + + /** + * udf function type: hive / spark + */ + private UdfType type; + + /** + * create time + */ + private Date createTime; + + /** + * update time + */ + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getUserId() { + return userId; + } + + public void setUserId(int userId) { + this.userId = userId; + } + + public String getFuncName() { + return funcName; + } + + public void setFuncName(String funcName) { + this.funcName = funcName; + } + + public String getClassName() { + return className; + } + + public void setClassName(String className) { + this.className = className; + } + + public String getArgTypes() { + return argTypes; + } + + public void setArgTypes(String argTypes) { + this.argTypes = argTypes; + } + + public String getDatabase() { + return database; + } + + public void setDatabase(String database) { + this.database = database; + } + + public String getDesc() { + return desc; + } + + public void setDesc(String desc) { + this.desc 
= desc; + } + + public int getResourceId() { + return resourceId; + } + + public void setResourceId(int resourceId) { + this.resourceId = resourceId; + } + + public String getResourceName() { + return resourceName; + } + + public void setResourceName(String resourceName) { + this.resourceName = resourceName; + } + + public UdfType getType() { + return type; + } + + public void setType(UdfType type) { + this.type = type; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "UdfFunc{" + + "id=" + id + + ", userId=" + userId + + ", funcName='" + funcName + '\'' + + ", className='" + className + '\'' + + ", argTypes='" + argTypes + '\'' + + ", database='" + database + '\'' + + ", desc='" + desc + '\'' + + ", resourceId=" + resourceId + + ", resourceName='" + resourceName + '\'' + + ", type=" + type + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + UdfFunc udfFunc = (UdfFunc) o; + + if (id != udfFunc.id) { + return false; + } + return !(funcName != null ? !funcName.equals(udfFunc.funcName) : udfFunc.funcName != null); + + } + + @Override + public int hashCode() { + int result = id; + result = 31 * result + (funcName != null ? 
funcName.hashCode() : 0); + return result; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/User.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/User.java new file mode 100644 index 0000000000..f1a7d4a009 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/User.java @@ -0,0 +1,273 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + + +import org.apache.dolphinscheduler.common.enums.UserType; +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.util.Date; + +/** + * user + */ +@Data +@TableName("t_escheduler_user") +@ApiModel(description = "UserModelDesc") +public class User { + + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * user name + */ + @ApiModelProperty(name = "userName", notes = "USER_NAME",dataType = "String",required = true) + private String userName; + + /** + * user password + */ + @ApiModelProperty(name = "userPassword", notes = "USER_PASSWORD",dataType = "String",required = true) + private String userPassword; + + /** + * mail + */ + private String email; + + /** + * phone + */ + private String phone; + + /** + * user type + */ + private UserType userType; + + /** + * tenant id + */ + private int tenantId; + + /** + * tenant code + */ + @TableField(exist = false) + private String tenantCode; + + /** + * tenant name + */ + @TableField(exist = false) + private String tenantName; + + /** + * queue name + */ + @TableField(exist = false) + private String queueName; + + /** + * alert group + */ + @TableField(exist = false) + private String alertGroup; + + /** + * user specified queue + */ + private String queue; + + /** + * create time + */ + private Date createTime; + + /** + * update time + */ + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public String getUserPassword() { + return userPassword; 
+ } + + public void setUserPassword(String userPassword) { + this.userPassword = userPassword; + } + + public String getEmail() { + return email; + } + + public void setEmail(String email) { + this.email = email; + } + + public UserType getUserType() { + return userType; + } + + public void setUserType(UserType userType) { + this.userType = userType; + } + + public int getTenantId() { + return tenantId; + } + + public void setTenantId(int tenantId) { + this.tenantId = tenantId; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + + public String getPhone() { + return phone; + } + + public void setPhone(String phone) { + this.phone = phone; + } + + public String getQueueName() { + return queueName; + } + + public void setQueueName(String queueName) { + this.queueName = queueName; + } + + public String getAlertGroup() { + return alertGroup; + } + + public void setAlertGroup(String alertGroup) { + this.alertGroup = alertGroup; + } + + public String getTenantName() { + return tenantName; + } + + public void setTenantName(String tenantName) { + this.tenantName = tenantName; + } + + public String getTenantCode() { + return tenantCode; + } + + public void setTenantCode(String tenantCode) { + this.tenantCode = tenantCode; + } + + public String getQueue() { + return queue; + } + + public void setQueue(String queue) { + this.queue = queue; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + User user = (User) o; + + if (id != user.id) { + return false; + } + return userName.equals(user.userName); + + } + + @Override + public int hashCode() { + int result = id; + result = 31 * result + userName.hashCode(); + return result; + } 
+ + @Override + public String toString() { + return "User{" + + "id=" + id + + ", userName='" + userName + '\'' + + ", userPassword='" + userPassword + '\'' + + ", email='" + email + '\'' + + ", phone='" + phone + '\'' + + ", userType=" + userType + + ", tenantId=" + tenantId + + ", tenantCode='" + tenantCode + '\'' + + ", tenantName='" + tenantName + '\'' + + ", queueName='" + queueName + '\'' + + ", alertGroup='" + alertGroup + '\'' + + ", queue='" + queue + '\'' + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UserAlertGroup.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UserAlertGroup.java new file mode 100644 index 0000000000..d8fe65610a --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UserAlertGroup.java @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; + +import java.util.Date; + +/** + * user alert group + */ +@Data +@TableName("t_escheduler_relation_user_alertgroup") +public class UserAlertGroup { + + /** + * id + */ + @TableId(value="id", type=IdType.AUTO) + private int id; + + /** + * id + */ + @TableField("alertgroup_id") + private int alertgroupId; + + /** + * alert group name + */ + @TableField(exist = false) + private String alertgroupName; + + /** + * user id + */ + private int userId; + + /** + * user name + */ + private String userName; + + /** + * create time + */ + private Date createTime; + + /** + * update time + */ + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getAlertgroupId() { + return alertgroupId; + } + + public void setAlertgroupId(int alertgroupId) { + this.alertgroupId = alertgroupId; + } + + public int getUserId() { + return userId; + } + + public void setUserId(int userId) { + this.userId = userId; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public String getAlertgroupName() { + return alertgroupName; + } + + public void setAlertgroupName(String alertgroupName) { + this.alertgroupName = alertgroupName; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + @Override + public String toString() { + return "UserAlertGroup{" + + "id=" + id + + ", alertgroupId=" + alertgroupId + + 
", alertgroupName='" + alertgroupName + '\'' + + ", userId=" + userId + + ", userName='" + userName + '\'' + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerGroup.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerGroup.java new file mode 100644 index 0000000000..29532eef83 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerGroup.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import lombok.Data; + +import java.util.Date; + +/** + * worker group for task running + */ +@Data +@TableName("t_escheduler_worker_group") +public class WorkerGroup { + + @TableId(value="id", type=IdType.AUTO) + private int id; + + private String name; + + private String ipList; + + private Date createTime; + + private Date updateTime; + + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getIpList() { + return ipList; + } + + public void setIpList(String ipList) { + this.ipList = ipList; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "Worker group model{" + + "id= " + id + + ",name= " + name + + ",ipList= " + ipList + + ",createTime= " + createTime + + ",updateTime= " + updateTime + + + "}"; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerServer.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerServer.java new file mode 100644 index 0000000000..977302061b --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerServer.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
/**
 * Snapshot of a worker server registered in zookeeper: network location,
 * zookeeper path, resource info and heartbeat timestamps.
 */
public class WorkerServer {

    /** primary key */
    private int id;

    /** worker host */
    private String host;

    /** worker port */
    private int port;

    /** zookeeper directory the worker registered under */
    private String zkDirectory;

    /** resource info reported by the worker */
    private String resInfo;

    /** record creation time */
    private Date createTime;

    /** time of the last heart beat received from this worker */
    private Date lastHeartbeatTime;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getHost() {
        return host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public int getPort() {
        return port;
    }

    public void setPort(int port) {
        this.port = port;
    }

    public String getZkDirectory() {
        return zkDirectory;
    }

    public void setZkDirectory(String zkDirectory) {
        this.zkDirectory = zkDirectory;
    }

    public String getResInfo() {
        return resInfo;
    }

    public void setResInfo(String resInfo) {
        this.resInfo = resInfo;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public Date getLastHeartbeatTime() {
        return lastHeartbeatTime;
    }

    public void setLastHeartbeatTime(Date lastHeartbeatTime) {
        this.lastHeartbeatTime = lastHeartbeatTime;
    }

    @Override
    public String toString() {
        // output format kept byte-identical to the historical implementation
        StringBuilder sb = new StringBuilder("WorkerServer{");
        sb.append("id=").append(id)
          .append(", host='").append(host).append('\'')
          .append(", port=").append(port)
          .append(", zkDirectory='").append(zkDirectory).append('\'')
          .append(", resInfo='").append(resInfo).append('\'')
          .append(", createTime=").append(createTime)
          .append(", lastHeartbeatTime=").append(lastHeartbeatTime)
          .append('}');
        return sb.toString();
    }
}
/**
 * One monitoring sample of a zookeeper server, as reported by the
 * four-letter-word stats (connections, watches, latency, mode, ...).
 */
public class ZookeeperRecord {

    /** zookeeper server hostname */
    private String hostname;

    /** current client connection count */
    private int connections;

    /** current watch count */
    private int watches;

    /** packets sent */
    private long sent;

    /** packets received */
    private long received;

    /** server mode: leader or follower */
    private String mode;

    /** minimum request latency */
    private int minLatency;

    /** average request latency */
    private int avgLatency;

    /** maximum request latency */
    private int maxLatency;

    /** znode count */
    private int nodeCount;

    /** sample time */
    private Date date;

    /** health state flag, 1 means normal */
    private int state;

    /**
     * Build a fully populated monitoring record.
     */
    public ZookeeperRecord(String hostname, int connections, int watches, long sent, long received, String mode, int minLatency, int avgLatency, int maxLatency, int nodeCount, int state, Date date) {
        this.hostname = hostname;
        this.connections = connections;
        this.watches = watches;
        this.sent = sent;
        this.received = received;
        this.mode = mode;
        this.minLatency = minLatency;
        this.avgLatency = avgLatency;
        this.maxLatency = maxLatency;
        this.nodeCount = nodeCount;
        this.state = state;
        this.date = date;
    }

    public String getHostname() {
        return hostname;
    }

    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    public int getConnections() {
        return connections;
    }

    public void setConnections(int connections) {
        this.connections = connections;
    }

    public int getWatches() {
        return watches;
    }

    public void setWatches(int watches) {
        this.watches = watches;
    }

    public long getSent() {
        return sent;
    }

    public void setSent(long sent) {
        this.sent = sent;
    }

    public long getReceived() {
        return received;
    }

    public void setReceived(long received) {
        this.received = received;
    }

    public String getMode() {
        return mode;
    }

    public void setMode(String mode) {
        this.mode = mode;
    }

    public int getMinLatency() {
        return minLatency;
    }

    public void setMinLatency(int minLatency) {
        this.minLatency = minLatency;
    }

    public int getAvgLatency() {
        return avgLatency;
    }

    public void setAvgLatency(int avgLatency) {
        this.avgLatency = avgLatency;
    }

    public int getMaxLatency() {
        return maxLatency;
    }

    public void setMaxLatency(int maxLatency) {
        this.maxLatency = maxLatency;
    }

    public int getNodeCount() {
        return nodeCount;
    }

    public void setNodeCount(int nodeCount) {
        this.nodeCount = nodeCount;
    }

    public int getState() {
        return state;
    }

    public void setState(int state) {
        this.state = state;
    }

    public Date getDate() {
        return date;
    }

    public void setDate(Date date) {
        this.date = date;
    }

    @Override
    public String toString() {
        // output format kept byte-identical to the historical implementation
        // (note: date is rendered before state, unlike the field order)
        StringBuilder sb = new StringBuilder("ZookeeperRecord{");
        sb.append("hostname='").append(hostname).append('\'')
          .append(", connections=").append(connections)
          .append(", watches=").append(watches)
          .append(", sent=").append(sent)
          .append(", received=").append(received)
          .append(", mode='").append(mode).append('\'')
          .append(", minLatency=").append(minLatency)
          .append(", avgLatency=").append(avgLatency)
          .append(", maxLatency=").append(maxLatency)
          .append(", nodeCount=").append(nodeCount)
          .append(", date=").append(date)
          .append(", state=").append(state)
          .append('}');
        return sb.toString();
    }
}
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.AccessToken; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.ibatis.annotations.Param; + +public interface AccessTokenMapper extends BaseMapper { + + /** + * + * @param page + * @param userName + * @param userId + * @return + */ + IPage selectAccessTokenPage(Page page, + @Param("userName") String userName, + @Param("userId") int userId + ); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.java new file mode 100644 index 0000000000..4b3090523c --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.dao.entity.AlertGroup; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + + +public interface AlertGroupMapper extends BaseMapper { + + + IPage queryAlertGroupPage(Page page, + @Param("groupName") String groupName); + + + List queryByGroupName(@Param("groupName") String groupName); + + + List queryByUserId(@Param("userId") int userId); + + + List queryByAlertType(@Param("alertType") AlertType alertType); + + List queryAllGroupList(); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertMapper.java new file mode 100644 index 0000000000..8d3e6c8381 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertMapper.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.dao.entity.Alert; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +public interface AlertMapper extends BaseMapper { + + + List listAlertByStatus(@Param("alertStatus") AlertStatus alertStatus); + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java new file mode 100644 index 0000000000..cbaa1df30f --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.CommandCount; +import com.baomidou.mybatisplus.core.conditions.Wrapper; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.toolkit.Constants; +import org.apache.ibatis.annotations.Param; +import org.apache.ibatis.annotations.Select; + +import java.util.Date; +import java.util.List; + +public interface CommandMapper extends BaseMapper { + + + + @Select("select * from t_escheduler_command ${ew.customSqlSegment}") + List getAll(@Param(Constants.WRAPPER) Wrapper wrapper); + + Command getOneToRun(); + + List countCommandState( + @Param("userId") int userId, + @Param("startTime") Date startTime, + @Param("endTime") Date endTime, + @Param("projectIdArray") Integer[] projectIdArray); + + + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java new file mode 100644 index 0000000000..3d9aec234f --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.DataSource; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +public interface DataSourceMapper extends BaseMapper { + + List queryDataSourceByType(@Param("userId") int userId, @Param("type") Integer type); + + IPage selectPaging(IPage page, + @Param("userId") int userId, + @Param("name") String name); + + List queryDataSourceByName(@Param("name") String name); + + + List queryAuthedDatasource(@Param("userId") int userId); + + List queryDatasourceExceptUserId(@Param("userId") int userId); + + List listAllDataSourceByType(@Param("type") Integer type); + + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.java new file mode 100644 index 0000000000..77f73c1c79 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.DatasourceUser; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import org.apache.ibatis.annotations.Param; + +public interface DataSourceUserMapper extends BaseMapper { + + + int deleteByUserId(@Param("userId") int userId); + + int deleteByDatasourceId(@Param("datasourceId") int datasourceId); + + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.java new file mode 100644 index 0000000000..bc2591f7e4 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.CommandCount; +import org.apache.dolphinscheduler.dao.entity.ErrorCommand; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import org.apache.ibatis.annotations.Param; + +import java.util.Date; +import java.util.List; + +public interface ErrorCommandMapper extends BaseMapper { + + List countCommandState( + @Param("startTime") Date startTime, + @Param("endTime") Date endTime, + @Param("projectIdArray") Integer[] projectIdArray); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java new file mode 100644 index 0000000000..1f4a49731d --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +public interface ProcessDefinitionMapper extends BaseMapper { + + + ProcessDefinition queryByDefineName(@Param("projectId") int projectId, + @Param("processDefinitionName") String name); + + IPage queryDefineListPaging(IPage page, + @Param("searchVal") String searchVal, + @Param("userId") int userId, + @Param("projectId") int projectId); + + List queryAllDefinitionList(@Param("projectId") int projectId); + + List queryDefinitionListByIdList(@Param("ids") Integer[] ids); + + List countDefinitionGroupByUser( + @Param("userId") Integer userId, + @Param("projectIds") Integer[] projectIds); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.java new file mode 100644 index 0000000000..856cea9712 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +public interface ProcessInstanceMapMapper extends BaseMapper { + + + + ProcessInstanceMap queryByParentId(@Param("parentProcessId") int parentProcessId, @Param("parentTaskId") int parentTaskId); + + + ProcessInstanceMap queryBySubProcessId(@Param("subProcessId") Integer subProcessId); + + int deleteByParentProcessId(@Param("parentProcessId") int parentProcessId); + + List querySubIdListByParentId(@Param("parentInstanceId") int parentInstanceId); + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java new file mode 100644 index 0000000000..4096cf7036 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.ibatis.annotations.Param;

import java.util.Date;
import java.util.List;

/**
 * Mapper for process instances.
 */
public interface ProcessInstanceMapper extends BaseMapper<ProcessInstance> {

    /**
     * Query one process instance with full detail by its id.
     */
    ProcessInstance queryDetailById(@Param("processId") int processId);

    /**
     * Query process instances running on the given host whose state is in the array.
     */
    List<ProcessInstance> queryByHostAndStatus(@Param("host") String host,
                                               @Param("states") int[] stateArray);

    /**
     * Page through process instances of a project, filtered by definition,
     * search keyword, states, host and time window. Any filter may be null/empty.
     */
    IPage<ProcessInstance> queryProcessInstanceListPaging(Page<ProcessInstance> page,
                                                          @Param("projectId") int projectId,
                                                          @Param("processDefinitionId") Integer processDefinitionId,
                                                          @Param("searchVal") String searchVal,
                                                          @Param("states") int[] statusArray,
                                                          @Param("host") String host,
                                                          @Param("startTime") Date startTime,
                                                          @Param("endTime") Date endTime);

    /**
     * Mark instances on the given host with one of the given states for failover.
     *
     * @return number of rows updated
     */
    int setFailoverByHostAndStateArray(@Param("host") String host,
                                       @Param("states") int[] stateArray);

    /**
     * Move instances from one execution state to another.
     *
     * @return number of rows updated
     */
    int updateProcessInstanceByState(@Param("originState") ExecutionStatus originState,
                                     @Param("destState") ExecutionStatus destState);

    /**
     * Count instances per execution state for the given projects and time window.
     */
    List<ExecuteStatusCount> countInstanceStateByUser(@Param("startTime") Date startTime,
                                                      @Param("endTime") Date endTime,
                                                      @Param("projectIds") Integer[] projectIds);

    /**
     * Query up to {@code size} instances of the given process definition.
     */
    List<ProcessInstance> queryByProcessDefineId(@Param("processDefinitionId") int processDefinitionId,
                                                 @Param("size") int size);

    /**
     * Query the latest scheduler-launched instance of a definition inside a time window.
     */
    ProcessInstance queryLastSchedulerProcess(@Param("processDefinitionId") int definitionId,
                                              @Param("startTime") String startTime,
                                              @Param("endTime") String endTime);

    /**
     * Query the latest instance of a definition in one of the given states inside a time window.
     */
    ProcessInstance queryLastRunningProcess(@Param("processDefinitionId") int definitionId,
                                            @Param("startTime") String startTime,
                                            @Param("endTime") String endTime,
                                            @Param("states") int[] stateArray);

    /**
     * Query the latest manually started instance of a definition inside a time window.
     */
    ProcessInstance queryLastManualProcess(@Param("processDefinitionId") int definitionId,
                                           @Param("startTime") Date startTime,
                                           @Param("endTime") Date endTime);
}
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.Project;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Mapper for projects.
 */
public interface ProjectMapper extends BaseMapper<Project> {

    /**
     * Query one project with full detail by its id.
     */
    Project queryDetailById(@Param("projectId") int projectId);

    /**
     * Query a project by its (unique) name.
     */
    Project queryByName(@Param("projectName") String projectName);

    /**
     * Page through the projects visible to the given user, filtered by name.
     */
    IPage<Project> queryProjectListPaging(IPage<Project> page,
                                          @Param("userId") int userId,
                                          @Param("searchName") String searchName);

    /**
     * Page through all projects, filtered by name.
     */
    IPage<Project> queryAllProjectListPaging(IPage<Project> page,
                                             @Param("searchName") String searchName);

    /**
     * List projects created by the given user.
     */
    List<Project> queryProjectCreatedByUser(@Param("userId") int userId);

    /**
     * List projects the given user is authorized on.
     */
    List<Project> queryAuthedProjectListByUserId(@Param("userId") int userId);

    /**
     * List projects NOT owned by the given user.
     */
    List<Project> queryProjectExceptUserId(@Param("userId") int userId);
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.ProjectUser; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import org.apache.ibatis.annotations.Param; + +public interface ProjectUserMapper extends BaseMapper { + + int deleteProjectRelation(@Param("projectId") int projectId, + @Param("userId") int userId); + + ProjectUser queryProjectRelation(@Param("projectId") int projectId, + @Param("userId") int userId); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/QueueMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/QueueMapper.java new file mode 100644 index 0000000000..5ed4b10be3 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/QueueMapper.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.Queue;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Mapper for YARN queues.
 */
public interface QueueMapper extends BaseMapper<Queue> {

    /**
     * Page through queues, filtered by the search keyword.
     */
    IPage<Queue> queryQueuePaging(IPage<Queue> page,
                                  @Param("searchVal") String searchVal);

    /**
     * List queues matching the given queue value and/or queue name; either filter may be null.
     */
    List<Queue> queryAllQueueList(@Param("queue") String queue,
                                  @Param("queueName") String queueName);
}
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.Resource;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Mapper for file/UDF resources.
 */
public interface ResourceMapper extends BaseMapper<Resource> {

    /**
     * List resources matching the given filters.
     *
     * @param alias  resource alias; query all if null
     * @param userId owner id; query all if -1
     * @param type   resource type; query all types if -1
     * @return matching resources
     */
    List<Resource> queryResourceList(@Param("alias") String alias,
                                     @Param("userId") int userId,
                                     @Param("type") int type);

    /**
     * Page through resources of a type.
     *
     * @param page      pagination parameters
     * @param userId    owner id; if -1, query the authorized resources instead
     * @param type      resource type
     * @param searchVal search keyword
     * @return one page of resources
     */
    IPage<Resource> queryResourcePaging(IPage<Resource> page,
                                        @Param("userId") int userId,
                                        @Param("type") int type,
                                        @Param("searchVal") String searchVal);

    /**
     * List resources of the given type the user is authorized on.
     */
    List<Resource> queryResourceListAuthored(@Param("userId") int userId,
                                             @Param("type") int type);

    /**
     * List resources authorized to the given user.
     */
    List<Resource> queryAuthorizedResourceList(@Param("userId") int userId);

    /**
     * List resources NOT owned by the given user.
     */
    List<Resource> queryResourceExceptUserId(@Param("userId") int userId);

    /**
     * Look up the tenant code of the resource owner by resource name.
     */
    String queryTenantCodeByResourceName(@Param("resName") String resName);
}
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.ResourcesUser; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import org.apache.ibatis.annotations.Param; + +public interface ResourceUserMapper extends BaseMapper { + + int deleteResourceUser(@Param("userId") int userId, + @Param("resourceId") int resourceId); + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java new file mode 100644 index 0000000000..1493754070 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.Schedule;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Mapper for workflow schedules (cron definitions).
 */
public interface ScheduleMapper extends BaseMapper<Schedule> {

    /**
     * Page through the schedules of a process definition, filtered by keyword.
     */
    IPage<Schedule> queryByProcessDefineIdPaging(IPage<Schedule> page,
                                                 @Param("processDefinitionId") int processDefinitionId,
                                                 @Param("searchVal") String searchVal);

    /**
     * List schedules belonging to the named project.
     */
    List<Schedule> querySchedulerListByProjectName(@Param("projectName") String projectName);

    /**
     * List schedules for any of the given process definition ids.
     */
    List<Schedule> selectAllByProcessDefineArray(@Param("processDefineIds") int[] processDefineIds);

    /**
     * List schedules of one process definition.
     */
    List<Schedule> queryByProcessDefinitionId(@Param("processDefinitionId") int processDefinitionId);
}
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.Session;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Mapper for login sessions.
 */
public interface SessionMapper extends BaseMapper<Session> {

    /**
     * List sessions belonging to the given user.
     */
    List<Session> queryByUserId(@Param("userId") int userId);
}
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.ibatis.annotations.Param;

import java.util.Date;
import java.util.List;

/**
 * Mapper for task instances.
 */
public interface TaskInstanceMapper extends BaseMapper<TaskInstance> {

    /**
     * List tasks of a process instance that are in the given state.
     */
    List<Integer> queryTaskByProcessIdAndState(@Param("processInstanceId") Integer processInstanceId,
                                               @Param("state") Integer state);

    /**
     * Query one task instance by its id.
     */
    TaskInstance queryById(@Param("taskInstanceId") int taskInstanceId);

    /**
     * List the tasks of a process instance carrying the given flag.
     */
    List<TaskInstance> findValidTaskListByProcessId(@Param("processInstanceId") Integer processInstanceId,
                                                    @Param("flag") Flag flag);

    /**
     * List task instances on the given host whose state is in the array.
     */
    List<TaskInstance> queryByHostAndStatus(@Param("host") String host,
                                            @Param("states") String stateArray);

    /**
     * Move matching tasks on a host into the failover destination state.
     *
     * @return number of rows updated
     */
    int setFailoverByHostAndStateArray(@Param("host") String host,
                                       @Param("states") String stateArray,
                                       @Param("destStatus") ExecutionStatus destStatus);

    /**
     * Query a task instance of a process instance by task name.
     */
    TaskInstance queryByInstanceIdAndName(@Param("processInstanceId") int processInstanceId,
                                          @Param("name") String name);

    /**
     * Count tasks visible to the user within the given projects / task ids.
     */
    Integer countTask(@Param("userId") int userId,
                      @Param("userType") UserType userType,
                      @Param("projectIds") int[] projectIds,
                      @Param("taskIds") int[] taskIds);

    /**
     * Count task instances per execution state for the user inside a time window.
     */
    List<ExecuteStatusCount> countTaskInstanceStateByUser(@Param("userId") int userId,
                                                          @Param("userType") UserType userType,
                                                          @Param("startTime") Date startTime,
                                                          @Param("endTime") Date endTime,
                                                          @Param("projectIds") String projectIds);

    /**
     * Page through task instances of a project, filtered by process instance,
     * keyword, task name, states, host and time window.
     */
    IPage<TaskInstance> queryTaskInstanceListPaging(IPage<TaskInstance> page,
                                                    @Param("projectId") int projectId,
                                                    @Param("processInstanceId") Integer processInstanceId,
                                                    @Param("searchVal") String searchVal,
                                                    @Param("taskName") String taskName,
                                                    @Param("states") String statusArray,
                                                    @Param("host") String host,
                                                    @Param("startTime") Date startTime,
                                                    @Param("endTime") Date endTime);
}
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.Tenant;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Mapper for tenants.
 */
public interface TenantMapper extends BaseMapper<Tenant> {

    /**
     * Query one tenant by its id.
     */
    Tenant queryById(@Param("tenantId") int tenantId);

    /**
     * List tenants with the given tenant code.
     */
    List<Tenant> queryByTenantCode(@Param("tenantCode") String tenantCode);

    /**
     * Page through tenants, filtered by the search keyword.
     */
    IPage<Tenant> queryTenantPaging(IPage<Tenant> page,
                                    @Param("searchVal") String searchVal);
}
+ */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.UDFUser; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import org.apache.ibatis.annotations.Param; + +public interface UDFUserMapper extends BaseMapper { + + int deleteByUserId(@Param("userId") int userId); + + int deleteByUdfFuncId(@Param("udfFuncId") int udfFuncId); + +} + diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java new file mode 100644 index 0000000000..3fa872f826 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Mapper for UDF functions.
 */
public interface UdfFuncMapper extends BaseMapper<UdfFunc> {

    /**
     * List UDF functions by an id string and/or function-name string; either filter may be null.
     */
    List<UdfFunc> queryUdfByIdStr(@Param("ids") String ids,
                                  @Param("funcNames") String funcNames);

    /**
     * Page through the user's UDF functions, filtered by keyword.
     */
    IPage<UdfFunc> queryUdfFuncPaging(IPage<UdfFunc> page,
                                      @Param("userId") int userId,
                                      @Param("searchVal") String searchVal);

    /**
     * List the user's UDF functions of the given type.
     */
    List<UdfFunc> getUdfFuncByType(@Param("userId") int userId,
                                   @Param("type") Integer type);

    /**
     * List UDF functions NOT owned by the given user.
     */
    List<UdfFunc> queryUdfFuncExceptUserId(@Param("userId") int userId);

    /**
     * List UDF functions the given user is authorized on.
     */
    List<UdfFunc> queryAuthedUdfFunc(@Param("userId") int userId);
}
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.UserAlertGroup;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Mapper for the user/alert-group relation table.
 */
public interface UserAlertGroupMapper extends BaseMapper<UserAlertGroup> {

    /**
     * Query users attached to the given alert group.
     */
    List<User> queryForUser(@Param("alertgroupId") int alertgroupId);

    /**
     * Remove all user relations of the given alert group.
     *
     * @return number of rows deleted
     */
    int deleteByAlertgroupId(@Param("alertgroupId") int alertgroupId);

    /**
     * List users belonging to the given alert group.
     */
    List<User> listUserByAlertgroupId(@Param("alertgroupId") int alertgroupId);
}
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.User;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Mapper for users.
 */
public interface UserMapper extends BaseMapper<User> {

    /**
     * List all general (non-admin) users.
     */
    List<User> queryAllGeneralUser();

    /**
     * Query a user by exact user name.
     */
    User queryByUserNameAccurately(@Param("userName") String userName);

    /**
     * Query a user by user name and password (login check).
     */
    User queryUserByNamePassword(@Param("userName") String userName,
                                 @Param("password") String password);

    /**
     * Page through users, filtered by user name.
     */
    IPage<User> queryUserPaging(Page<User> page,
                                @Param("userName") String userName);

    /**
     * Query one user with full detail by id.
     */
    User queryDetailsById(@Param("userId") int userId);

    /**
     * List users belonging to the given alert group.
     */
    List<User> queryUserListByAlertGroupId(@Param("alertgroupId") int alertgroupId);

    /**
     * Query the user together with the tenant code of its tenant.
     */
    User queryTenantCodeByUserId(@Param("userId") int userId);

    /**
     * Query the user owning the given access token.
     */
    User queryUserByToken(@Param("token") String token);
}
package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Mapper for worker groups.
 */
public interface WorkerGroupMapper extends BaseMapper<WorkerGroup> {

    /**
     * List every worker group.
     */
    List<WorkerGroup> queryAllWorkerGroup();

    /**
     * List worker groups with the given name.
     */
    List<WorkerGroup> queryWorkerGroupByName(@Param("name") String name);

    /**
     * Page through worker groups, filtered by the search keyword.
     */
    IPage<WorkerGroup> queryListPaging(IPage<WorkerGroup> page,
                                       @Param("searchVal") String searchVal);
}
package org.apache.dolphinscheduler.dao.upgrade;

import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.SchemaUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;

/**
 * Upgrade manager: initializes the DolphinScheduler metadata schema and
 * upgrades it step by step through every schema directory newer than the
 * currently installed version.
 */
public class DolphinSchedulerManager {
    private static final Logger logger = LoggerFactory.getLogger(DolphinSchedulerManager.class);

    UpgradeDao upgradeDao;

    /**
     * Pick the concrete UpgradeDao for the configured database type.
     * NOTE(review): if {@code UpgradeDao.getDbType()} returns null the field
     * stays null and later calls will NPE — confirm that cannot happen in practice.
     */
    private void initUpgradeDao() {
        DbType dbType = UpgradeDao.getDbType();
        if (dbType != null) {
            switch (dbType) {
                case MYSQL:
                    upgradeDao = MysqlUpgradeDao.getInstance();
                    break;
                case POSTGRESQL:
                    upgradeDao = PostgresqlUpgradeDao.getInstance();
                    break;
                default:
                    logger.error("not support sql type: {},can't upgrade", dbType);
                    throw new IllegalArgumentException("not support sql type,can't upgrade");
            }
        }
    }

    public DolphinSchedulerManager() {
        initUpgradeDao();
    }

    /**
     * Create the table structure unless the database was already initialized.
     */
    public void initDolphinScheduler() {
        // Determines whether the escheduler table structure has been init
        if (upgradeDao.isExistsTable("t_escheduler_version") || upgradeDao.isExistsTable("t_escheduler_queue")) {
            logger.info("The database has been initialized. Skip the initialization step");
            return;
        }
        this.initDolphinSchedulerSchema();
    }

    /**
     * Create the full DolphinScheduler table structure from scratch.
     */
    public void initDolphinSchedulerSchema() {
        logger.info("Start initializing the DolphinScheduler manager table structure");
        upgradeDao.initSchema();
    }

    /**
     * upgrade DolphinScheduler: detect the current schema version, apply every
     * newer schema directory in ascending order, then record the software version.
     *
     * @throws Exception if any upgrade step fails
     */
    public void upgradeDolphinScheduler() throws Exception {
        // Gets a list of all upgrades
        List<String> schemaList = SchemaUtils.getAllSchemaList();
        if (schemaList == null || schemaList.isEmpty()) {
            logger.info("There is no schema to upgrade!");
        } else {
            String version = "";
            // Gets the version of the current system, probing tables from newest
            // to oldest layout so legacy installs are detected correctly
            if (upgradeDao.isExistsTable("t_escheduler_version")) {
                version = upgradeDao.getCurrentVersion("t_escheduler_version");
            } else if (upgradeDao.isExistsTable("t_dolphinscheduler_version")) {
                version = upgradeDao.getCurrentVersion("t_dolphinscheduler_version");
            } else if (upgradeDao.isExistsColumn("t_escheduler_queue", "create_time")) {
                version = "1.0.1";
            } else if (upgradeDao.isExistsTable("t_escheduler_queue")) {
                version = "1.0.0";
            } else {
                logger.error("Unable to determine current software version, so cannot upgrade");
                throw new RuntimeException("Unable to determine current software version, so cannot upgrade");
            }
            // The target version of the upgrade; each applied step advances `version`
            String schemaVersion = "";
            for (String schemaDir : schemaList) {
                schemaVersion = schemaDir.split("_")[0];
                if (SchemaUtils.isAGreatVersion(schemaVersion, version)) {
                    logger.info("upgrade DolphinScheduler metadata version from {} to {}", version, schemaVersion);
                    logger.info("Begin upgrading DolphinScheduler's table structure");
                    upgradeDao.upgradeDolphinScheduler(schemaDir);
                    version = schemaVersion;
                }
            }
        }

        // Assign the value of the version field in the version table to the version of the product
        upgradeDao.updateVersion(SchemaUtils.getSoftVersion());
    }
}
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java new file mode 100644 index 0000000000..6c6d333965 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.upgrade; + +import org.apache.dolphinscheduler.common.utils.ConnectionUtils; +import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; + +public class MysqlUpgradeDao extends UpgradeDao { + + public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); + + @Override + protected void init() { + + } + + private static class MysqlUpgradeDaoHolder { + private static final MysqlUpgradeDao INSTANCE = new MysqlUpgradeDao(); + } + + private MysqlUpgradeDao() { + } + + public static final MysqlUpgradeDao getInstance() { + return MysqlUpgradeDaoHolder.INSTANCE; + } + + + /** + * Determines whether a table exists + * @param tableName + * @return + */ + public boolean isExistsTable(String tableName) { + ResultSet rs = null; + Connection conn = null; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + rs = conn.getMetaData().getTables(null, null, tableName, null); + if (rs.next()) { + return true; + } else { + return false; + } + + } catch (SQLException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } finally { + ConnectionUtils.releaseResource(rs, null, conn); + } + + } + + /** + * Determines whether a field exists in the specified table + * @param tableName + * @param columnName + * @return + */ + public boolean isExistsColumn(String tableName,String columnName) { + Connection conn = null; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + ResultSet rs = conn.getMetaData().getColumns(null,null,tableName,columnName); + if (rs.next()) { + return true; + } else { + return false; + } + + } catch (SQLException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } finally { + ConnectionUtils.releaseResource(null, null, conn); + } + + } + +} diff 
--git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java new file mode 100644 index 0000000000..7866f36d34 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.upgrade; + +import org.apache.dolphinscheduler.common.utils.ConnectionUtils; +import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; + +public class PostgresqlUpgradeDao extends UpgradeDao { + + public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); + private static final String schema = getSchema(); + + @Override + protected void init() { + + } + + private static class PostgresqlUpgradeDaoHolder { + private static final PostgresqlUpgradeDao INSTANCE = new PostgresqlUpgradeDao(); + } + + private PostgresqlUpgradeDao() { + } + + public static final PostgresqlUpgradeDao getInstance() { + return PostgresqlUpgradeDaoHolder.INSTANCE; + } + + + @Override + public void initSchema(String initSqlPath) { + super.initSchema(initSqlPath); + } + + public static String getSchema(){ + Connection conn = null; + PreparedStatement pstmt = null; + ResultSet resultSet = null; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + pstmt = conn.prepareStatement("select current_schema()"); + resultSet = pstmt.executeQuery(); + while (resultSet.next()){ + if(resultSet.isFirst()){ + return resultSet.getString(1); + } + } + + } catch (SQLException e) { + logger.error(e.getMessage(),e); + } finally { + ConnectionUtils.releaseResource(resultSet, pstmt, conn); + } + return ""; + } + + /** + * Determines whether a table exists + * @param tableName + * @return + */ + public boolean isExistsTable(String tableName) { + Connection conn = null; + ResultSet rs = null; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + + rs = conn.getMetaData().getTables(null, schema, tableName, null); + if (rs.next()) { + return true; + } else { + return false; + } + + } catch (SQLException e) { + 
logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } finally { + ConnectionUtils.releaseResource(rs, null, conn); + } + + } + + /** + * Determines whether a field exists in the specified table + * @param tableName + * @param columnName + * @return + */ + public boolean isExistsColumn(String tableName,String columnName) { + Connection conn = null; + ResultSet rs = null; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + rs = conn.getMetaData().getColumns(null,schema,tableName,columnName); + if (rs.next()) { + return true; + } else { + return false; + } + + } catch (SQLException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } finally { + ConnectionUtils.releaseResource(rs, null, conn); + + } + + } + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java new file mode 100644 index 0000000000..fa57b7b49f --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java @@ -0,0 +1,359 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.upgrade; + +import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.utils.ConnectionUtils; +import org.apache.dolphinscheduler.common.utils.SchemaUtils; +import org.apache.dolphinscheduler.common.utils.ScriptRunner; +import org.apache.dolphinscheduler.dao.AbstractBaseDao; +import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.text.MessageFormat; + +public abstract class UpgradeDao extends AbstractBaseDao { + + public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); + private static final String T_VERSION_NAME = "t_escheduler_version"; + private static final String T_NEW_VERSION_NAME = "t_dolphinscheduler_version"; + private static final String rootDir = System.getProperty("user.dir"); + private static final DbType dbType = getCurrentDbType(); + + @Override + protected void init() { + + } + + /** + * get db type + * @return + */ + public static DbType getDbType(){ + return dbType; + } + + /** + * get db type + * @return + */ + private static DbType getCurrentDbType(){ + Connection conn = null; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + String name = conn.getMetaData().getDatabaseProductName().toUpperCase(); + return DbType.valueOf(name); + } catch (Exception e) { + logger.error(e.getMessage(),e); + return null; + }finally { + ConnectionUtils.releaseResource(null, null, conn); + } + } + + public void initSchema(){ + DbType dbType = getDbType(); + String initSqlPath = ""; + if (dbType != null) { + switch (dbType) { + case MYSQL: + initSqlPath = "/sql/create/release-1.0.0_schema/mysql/"; + initSchema(initSqlPath); + break; + case POSTGRESQL: + initSqlPath = 
"/sql/create/release-1.2.0_schema/postgresql/"; + initSchema(initSqlPath); + break; + default: + logger.error("not support sql type: {},can't upgrade", dbType); + throw new IllegalArgumentException("not support sql type,can't upgrade"); + } + } + } + + + public void initSchema(String initSqlPath) { + + // Execute the escheduler DDL, it cannot be rolled back + runInitDDL(initSqlPath); + + // Execute the escheduler DML, it can be rolled back + runInitDML(initSqlPath); + + } + + private void runInitDML(String initSqlPath) { + Connection conn = null; + if (StringUtils.isEmpty(rootDir)) { + throw new RuntimeException("Environment variable user.dir not found"); + } + //String mysqlSQLFilePath = rootDir + "/sql/create/release-1.0.0_schema/mysql/escheduler_dml.sql"; + String mysqlSQLFilePath = rootDir + initSqlPath + "dolphinscheduler_dml.sql"; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + conn.setAutoCommit(false); + // 执行escheduler_dml.sql脚本,导入escheduler相关的数据 + // Execute the ark_manager_dml.sql script to import the data related to escheduler + + ScriptRunner initScriptRunner = new ScriptRunner(conn, false, true); + Reader initSqlReader = new FileReader(new File(mysqlSQLFilePath)); + initScriptRunner.runScript(initSqlReader); + + conn.commit(); + } catch (IOException e) { + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error(e1.getMessage(),e1); + } + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } catch (Exception e) { + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error(e1.getMessage(),e1); + } + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } finally { + ConnectionUtils.releaseResource(null, null, conn); + + } + + } + + private void runInitDDL(String initSqlPath) { + Connection conn = null; + if (StringUtils.isEmpty(rootDir)) { + throw new RuntimeException("Environment variable user.dir not found"); + } + //String mysqlSQLFilePath = 
rootDir + "/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql"; + String mysqlSQLFilePath = rootDir + initSqlPath + "dolphinscheduler_ddl.sql"; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + // Execute the escheduler_ddl.sql script to create the table structure of escheduler + ScriptRunner initScriptRunner = new ScriptRunner(conn, true, true); + Reader initSqlReader = new FileReader(new File(mysqlSQLFilePath)); + initScriptRunner.runScript(initSqlReader); + + } catch (IOException e) { + + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } catch (Exception e) { + + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } finally { + ConnectionUtils.releaseResource(null, null, conn); + + } + + } + + /** + * Determines whether a table exists + * @param tableName + * @return + */ + public abstract boolean isExistsTable(String tableName); + + /** + * Determines whether a field exists in the specified table + * @param tableName + * @param columnName + * @return + */ + public abstract boolean isExistsColumn(String tableName,String columnName); + + + public String getCurrentVersion(String versionName) { + String sql = String.format("select version from %s",versionName); + Connection conn = null; + ResultSet rs = null; + PreparedStatement pstmt = null; + String version = null; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + pstmt = conn.prepareStatement(sql); + rs = pstmt.executeQuery(); + + if (rs.next()) { + version = rs.getString(1); + } + + return version; + + } catch (SQLException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException("sql: " + sql, e); + } finally { + ConnectionUtils.releaseResource(rs, pstmt, conn); + } + } + + + public void upgradeDolphinScheduler(String schemaDir) { + + upgradeDolphinSchedulerDDL(schemaDir); + + upgradeDolphinSchedulerDML(schemaDir); + + } + + private void upgradeDolphinSchedulerDML(String schemaDir) { 
+ String schemaVersion = schemaDir.split("_")[0]; + if (StringUtils.isEmpty(rootDir)) { + throw new RuntimeException("Environment variable user.dir not found"); + } + String mysqlSQLFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_dml.sql",rootDir,schemaDir,getDbType().name().toLowerCase()); + logger.info("mysqlSQLFilePath"+mysqlSQLFilePath); + Connection conn = null; + PreparedStatement pstmt = null; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + conn.setAutoCommit(false); + // Execute the upgraded escheduler dml + ScriptRunner scriptRunner = new ScriptRunner(conn, false, true); + Reader sqlReader = new FileReader(new File(mysqlSQLFilePath)); + scriptRunner.runScript(sqlReader); + if (isExistsTable(T_VERSION_NAME)) { + // Change version in the version table to the new version + String upgradeSQL = String.format("update %s set version = ?",T_VERSION_NAME); + pstmt = conn.prepareStatement(upgradeSQL); + pstmt.setString(1, schemaVersion); + pstmt.executeUpdate(); + }else if (isExistsTable(T_NEW_VERSION_NAME)) { + // Change version in the version table to the new version + String upgradeSQL = String.format("update %s set version = ?",T_NEW_VERSION_NAME); + pstmt = conn.prepareStatement(upgradeSQL); + pstmt.setString(1, schemaVersion); + pstmt.executeUpdate(); + } + conn.commit(); + } catch (FileNotFoundException e) { + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error(e1.getMessage(),e1); + } + logger.error(e.getMessage(),e); + throw new RuntimeException("sql file not found ", e); + } catch (IOException e) { + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error(e1.getMessage(),e1); + } + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } catch (SQLException e) { + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error(e1.getMessage(),e1); + } + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } catch 
(Exception e) { + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error(e1.getMessage(),e1); + } + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } finally { + ConnectionUtils.releaseResource(null, pstmt, conn); + } + + } + + private void upgradeDolphinSchedulerDDL(String schemaDir) { + if (StringUtils.isEmpty(rootDir)) { + throw new RuntimeException("Environment variable user.dir not found"); + } + String mysqlSQLFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_ddl.sql",rootDir,schemaDir,getDbType().name().toLowerCase()); + Connection conn = null; + PreparedStatement pstmt = null; + try { + conn = ConnectionFactory.getDataSource().getConnection(); + String dbName = conn.getCatalog(); + logger.info(dbName); + conn.setAutoCommit(true); + // Execute the escheduler ddl.sql for the upgrade + ScriptRunner scriptRunner = new ScriptRunner(conn, true, true); + Reader sqlReader = new FileReader(new File(mysqlSQLFilePath)); + scriptRunner.runScript(sqlReader); + + } catch (FileNotFoundException e) { + + logger.error(e.getMessage(),e); + throw new RuntimeException("sql file not found ", e); + } catch (IOException e) { + + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } catch (SQLException e) { + + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } catch (Exception e) { + + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage(),e); + } finally { + ConnectionUtils.releaseResource(null, pstmt, conn); + } + + } + + + + public void updateVersion(String version) { + // Change version in the version table to the new version + String versionName = T_VERSION_NAME; + if(!SchemaUtils.isAGreatVersion("1.2.0" , version)){ + versionName = "t_dolphinscheduler_version"; + } + String upgradeSQL = String.format("update %s set version = ?",versionName); + PreparedStatement pstmt = null; + Connection conn = null; + try { + conn = 
ConnectionFactory.getDataSource().getConnection(); + pstmt = conn.prepareStatement(upgradeSQL); + pstmt.setString(1, version); + pstmt.executeUpdate(); + + } catch (SQLException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException("sql: " + upgradeSQL, e); + } finally { + ConnectionUtils.releaseResource(null, pstmt, conn); + } + + } + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinScheduler.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinScheduler.java new file mode 100644 index 0000000000..a70541cc7c --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinScheduler.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.upgrade.shell; + +import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * init DolphinScheduler + * + */ +public class CreateDolphinScheduler { + + private static final Logger logger = LoggerFactory.getLogger(CreateDolphinScheduler.class); + + public static void main(String[] args) { + DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager(); + + try { + dolphinSchedulerManager.initDolphinScheduler(); + logger.info("init DolphinScheduler finished"); + dolphinSchedulerManager.upgradeDolphinScheduler(); + logger.info("upgrade DolphinScheduler finished"); + logger.info("create DolphinScheduler success"); + } catch (Exception e) { + logger.error("create DolphinScheduler failed",e); + } + + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinScheduler.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinScheduler.java new file mode 100644 index 0000000000..3a0e00f655 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinScheduler.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.upgrade.shell; + +import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * init DolphinScheduler + * + */ +public class InitDolphinScheduler { + + private static final Logger logger = LoggerFactory.getLogger(InitDolphinScheduler.class); + + public static void main(String[] args) { + Thread.currentThread().setName("manager-InitDolphinScheduler"); + DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager(); + dolphinSchedulerManager.initDolphinScheduler(); + logger.info("init DolphinScheduler finished"); + + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java new file mode 100644 index 0000000000..9da418ba68 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.upgrade.shell; + +import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * upgrade DolphinScheduler database + */ +public class UpgradeDolphinScheduler { + private static final Logger logger = LoggerFactory.getLogger(UpgradeDolphinScheduler.class); + + public static void main(String[] args) { + DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager(); + try { + dolphinSchedulerManager.upgradeDolphinScheduler(); + logger.info("upgrade DolphinScheduler success"); + } catch (Exception e) { + logger.error(e.getMessage(),e); + logger.info("Upgrade DolphinScheduler failed"); + throw new RuntimeException(e); + } + } + + + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/BeanContext.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/BeanContext.java new file mode 100644 index 0000000000..6719c299a0 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/BeanContext.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.utils; + + +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.stereotype.Component; + +/** + * bean context + */ +@Component + public class BeanContext implements ApplicationContextAware { + private static ApplicationContext applicationContext; + + public static ApplicationContext getApplicationContext(){ + return applicationContext; + } + + @SuppressWarnings("unchecked") + public static T getBean(String name) throws BeansException { + return (T)applicationContext.getBean(name); + } + + public static T getBean(Class clz) throws BeansException { + return applicationContext.getBean(clz); + } + + + + @Override + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + BeanContext.applicationContext = applicationContext; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java new file mode 100644 index 0000000000..dba8aeb9fc --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.utils; + + +import org.apache.dolphinscheduler.common.enums.TaskDependType; +import org.apache.dolphinscheduler.common.graph.DAG; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.model.TaskNodeRelation; +import org.apache.dolphinscheduler.common.process.ProcessDag; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessData; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; + +/** + * dag tools + */ +public class DagHelper { + + + private static final Logger logger = LoggerFactory.getLogger(DagHelper.class); + + /** + * generate flow node relation list by task node list; + * Edges that are not in the task Node List will not be added to the result + * 根据task Node List生成node关系列表,不在task Node List中的边不会被添加到结果中 + * + * @param taskNodeList + * @return + */ + private static List generateRelationListByFlowNodes(List taskNodeList) { + List nodeRelationList = new ArrayList<>(); + for (TaskNode taskNode : taskNodeList) { + String preTasks = taskNode.getPreTasks(); + List preTaskList = JSONUtils.toList(preTasks, String.class); + if (preTaskList != null) { + for (String depNodeName : preTaskList) { + if (null != findNodeByName(taskNodeList, depNodeName)) { + 
nodeRelationList.add(new TaskNodeRelation(depNodeName, taskNode.getName())); + } + } + } + } + return nodeRelationList; + } + + /** + * generate task nodes needed by dag + * 生成dag需要的task nodes + * + * @param taskNodeList + * @param taskDependType + * @return + */ + private static List generateFlowNodeListByStartNode(List taskNodeList, List startNodeNameList, + List recoveryNodeNameList, TaskDependType taskDependType) { + List destFlowNodeList = new ArrayList<>(); + List startNodeList = startNodeNameList; + + if(taskDependType != TaskDependType.TASK_POST + && startNodeList.size() == 0){ + logger.error("start node list is empty! cannot continue run the process "); + return destFlowNodeList; + } + List destTaskNodeList = new ArrayList<>(); + List tmpTaskNodeList = new ArrayList<>(); + if (taskDependType == TaskDependType.TASK_POST + && recoveryNodeNameList.size() > 0) { + startNodeList = recoveryNodeNameList; + } + if (startNodeList == null || startNodeList.size() == 0) { + // 没有特殊的指定start nodes + tmpTaskNodeList = taskNodeList; + } else { + // 指定了start nodes or 恢复执行 + for (String startNodeName : startNodeList) { + TaskNode startNode = findNodeByName(taskNodeList, startNodeName); + List childNodeList = new ArrayList<>(); + if (TaskDependType.TASK_POST == taskDependType) { + childNodeList = getFlowNodeListPost(startNode, taskNodeList); + } else if (TaskDependType.TASK_PRE == taskDependType) { + childNodeList = getFlowNodeListPre(startNode, recoveryNodeNameList, taskNodeList); + } else { + childNodeList.add(startNode); + } + tmpTaskNodeList.addAll(childNodeList); + } + } + + for (TaskNode taskNode : tmpTaskNodeList) { + if (null == findNodeByName(destTaskNodeList, taskNode.getName())) { + destTaskNodeList.add(taskNode); + } + } + return destTaskNodeList; + } + + + /** + * find all the nodes that depended on the start node + * 找到所有依赖start node的node + * + * @param startNode + * @param taskNodeList + * @return + */ + private static List getFlowNodeListPost(TaskNode 
startNode, List taskNodeList) { + List resultList = new ArrayList<>(); + for (TaskNode taskNode : taskNodeList) { + List depList = taskNode.getDepList(); + if (depList != null) { + if (depList.contains(startNode.getName())) { + resultList.addAll(getFlowNodeListPost(taskNode, taskNodeList)); + } + } + + } + resultList.add(startNode); + return resultList; + } + + /** + * find all nodes that start nodes depend on. + * 找到所有start node依赖的node + * + * @param startNode + * @param taskNodeList + * @return + */ + private static List getFlowNodeListPre(TaskNode startNode, List recoveryNodeNameList, List taskNodeList) { + + List resultList = new ArrayList<>(); + + List depList = startNode.getDepList(); + resultList.add(startNode); + if (depList == null || depList.size() == 0) { + return resultList; + } + for (String depNodeName : depList) { + TaskNode start = findNodeByName(taskNodeList, depNodeName); + if (recoveryNodeNameList.contains(depNodeName)) { + resultList.add(start); + } else { + resultList.addAll(getFlowNodeListPre(start, recoveryNodeNameList, taskNodeList)); + } + } + return resultList; + } + + /** + * generate dag by start nodes and recovery nodes + * 根据start nodes 和 recovery nodes 生成dag + * @param processDefinitionJson + * @param startNodeNameList + * @param recoveryNodeNameList + * @param depNodeType + * @return + * @throws Exception + */ + public static ProcessDag generateFlowDag(String processDefinitionJson, + List startNodeNameList, + List recoveryNodeNameList, + TaskDependType depNodeType) throws Exception { + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + + List taskNodeList = processData.getTasks(); + List destTaskNodeList = generateFlowNodeListByStartNode(taskNodeList, startNodeNameList, recoveryNodeNameList, depNodeType); + if (destTaskNodeList.isEmpty()) { + return null; + } + List taskNodeRelations = generateRelationListByFlowNodes(destTaskNodeList); + ProcessDag processDag = new ProcessDag(); + 
processDag.setEdges(taskNodeRelations); + processDag.setNodes(destTaskNodeList); + return processDag; + } + + /** + * parse the forbidden task nodes in process definition. + * @param processDefinitionJson + * @return + */ + public static Map getForbiddenTaskNodeMaps(String processDefinitionJson){ + Map forbidTaskNodeMap = new ConcurrentHashMap<>(); + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + + List taskNodeList = processData.getTasks(); + for(TaskNode node : taskNodeList){ + if(node.isForbidden()){ + forbidTaskNodeMap.putIfAbsent(node.getName(), node); + } + } + return forbidTaskNodeMap; + } + + + /** + * find node by node name + * 通过 name 获取节点 + * @param nodeDetails + * @param nodeName + * @return + * @see TaskNode + */ + public static TaskNode findNodeByName(List nodeDetails, String nodeName) { + for (TaskNode taskNode : nodeDetails) { + if (taskNode.getName().equals(nodeName)) { + return taskNode; + } + } + return null; + } + + + /** + * get start vertex in one dag + * it would find the post node if the start vertex is forbidden running + * @param parentNodeName the previous node + * @param dag + * @param completeTaskList + * @return + */ + public static Collection getStartVertex(String parentNodeName, DAG dag, + Map completeTaskList){ + + if(completeTaskList == null){ + completeTaskList = new HashMap<>(); + } + Collection startVertexs = null; + if(StringUtils.isNotEmpty(parentNodeName)){ + startVertexs = dag.getSubsequentNodes(parentNodeName); + }else{ + startVertexs = dag.getBeginNode(); + } + + List tmpStartVertexs = new ArrayList<>(); + if(startVertexs!= null){ + tmpStartVertexs.addAll(startVertexs); + } + + for(String start : startVertexs){ + TaskNode startNode = dag.getNode(start); + if(!startNode.isForbidden() && !completeTaskList.containsKey(start)){ + continue; + } + Collection postNodes = getStartVertex(start, dag, completeTaskList); + + for(String post : postNodes){ + 
if(checkForbiddenPostCanSubmit(post, dag)){ + tmpStartVertexs.add(post); + } + } + tmpStartVertexs.remove(start); + } + + return tmpStartVertexs; + } + + /** + * + * @param postNodeName + * @param dag + * @return + */ + private static boolean checkForbiddenPostCanSubmit(String postNodeName, DAG dag){ + + TaskNode postNode = dag.getNode(postNodeName); + List dependList = postNode.getDepList(); + + for(String dependNodeName : dependList){ + TaskNode dependNode = dag.getNode(dependNodeName); + if(!dependNode.isForbidden()){ + return false; + } + } + return true; + } + + + + /*** + * generate dag graph + * @param processDag + * @return + */ + public static DAG buildDagGraph(ProcessDag processDag) { + + DAG dag = new DAG<>(); + + /** + * add vertex + */ + if (CollectionUtils.isNotEmpty(processDag.getNodes())){ + for (TaskNode node : processDag.getNodes()){ + dag.addNode(node.getName(),node); + } + } + + /** + * add edge + */ + if (CollectionUtils.isNotEmpty(processDag.getEdges())){ + for (TaskNodeRelation edge : processDag.getEdges()){ + dag.addEdge(edge.getStartNode(),edge.getEndNode()); + } + } + return dag; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PropertyUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PropertyUtils.java new file mode 100644 index 0000000000..ca08bf1c2e --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PropertyUtils.java @@ -0,0 +1,192 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.utils; + +import org.apache.dolphinscheduler.common.Constants; +import com.baomidou.mybatisplus.core.toolkit.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + + +/** + * property utils + * single instance + */ +public class PropertyUtils { + + /** + * logger + */ + private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class); + + private static final Properties properties = new Properties(); + + private static final PropertyUtils propertyUtils = new PropertyUtils(); + + private PropertyUtils(){ + init(); + } + + private void init(){ + String[] propertyFiles = new String[]{Constants.DAO_PROPERTIES_PATH}; + for (String fileName : propertyFiles) { + InputStream fis = null; + try { + fis = PropertyUtils.class.getResourceAsStream(fileName); + properties.load(fis); + + } catch (IOException e) { + logger.error(e.getMessage(), e); + System.exit(1); + } finally { + IOUtils.closeQuietly(fis); + } + } + } + +/* + public static PropertyUtils getInstance(){ + return propertyUtils; + } +*/ + + /** + * get property value + * + * @param key property name + * @return + */ + public static String getString(String key) { + return properties.getProperty(key); + } + + /** + * get property value + * + * @param key property name + * @return get property int value , if key == null, then return -1 + */ + public static int getInt(String key) { + return getInt(key, -1); + } + + /** + * + * @param key + * 
@param defaultValue + * @return + */ + public static int getInt(String key, int defaultValue) { + String value = getString(key); + if (value == null) { + return defaultValue; + } + + try { + return Integer.parseInt(value); + } catch (NumberFormatException e) { + logger.info(e.getMessage(),e); + } + return defaultValue; + } + + /** + * get property value + * + * @param key property name + * @return + */ + public static Boolean getBoolean(String key) { + String value = properties.getProperty(key.trim()); + if(null != value){ + return Boolean.parseBoolean(value); + } + + return null; + } + + /** + * + * @param key + * @return + */ + public static long getLong(String key) { + return getLong(key,-1); + } + + /** + * + * @param key + * @param defaultVal + * @return + */ + public static long getLong(String key, long defaultVal) { + String val = getString(key); + return val == null ? defaultVal : Long.parseLong(val); + } + + + /** + * + * @param key + * @param defaultVal + * @return + */ + public double getDouble(String key, double defaultVal) { + String val = getString(key); + return val == null ? defaultVal : Double.parseDouble(val); + } + + + /** + * get array + * @param key property name + * @param splitStr separator + * @return + */ + public static String[] getArray(String key, String splitStr) { + String value = getString(key); + if (value == null) { + return null; + } + try { + String[] propertyArray = value.split(splitStr); + return propertyArray; + } catch (NumberFormatException e) { + logger.info(e.getMessage(),e); + } + return null; + } + + /** + * + * @param key + * @param type + * @param defaultValue + * @param + * @return get enum value + */ + public > T getEnum(String key, Class type, + T defaultValue) { + String val = getString(key); + return val == null ? 
defaultValue : Enum.valueOf(type, val); + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/AbstractCycle.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/AbstractCycle.java new file mode 100644 index 0000000000..d062c6abf6 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/AbstractCycle.java @@ -0,0 +1,169 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.utils.cron; + +import org.apache.dolphinscheduler.common.enums.CycleEnum; +import com.cronutils.model.Cron; +import com.cronutils.model.field.CronField; +import com.cronutils.model.field.CronFieldName; +import com.cronutils.model.field.expression.*; + +/** + * Cycle + */ +public abstract class AbstractCycle { + + protected Cron cron; + + protected CronField minField; + protected CronField hourField; + protected CronField dayOfMonthField; + protected CronField dayOfWeekField; + protected CronField monthField; + protected CronField yearField; + + public CycleLinks addCycle(AbstractCycle cycle) { + return new CycleLinks(this.cron).addCycle(this).addCycle(cycle); + } + + public AbstractCycle(Cron cron) { + if (cron == null) { + throw new IllegalArgumentException("cron must not be null!"); + } + + this.cron = cron; + this.minField = cron.retrieve(CronFieldName.MINUTE); + this.hourField = cron.retrieve(CronFieldName.HOUR); + this.dayOfMonthField = cron.retrieve(CronFieldName.DAY_OF_MONTH); + this.dayOfWeekField = cron.retrieve(CronFieldName.DAY_OF_WEEK); + this.monthField = cron.retrieve(CronFieldName.MONTH); + this.yearField = cron.retrieve(CronFieldName.YEAR); + } + + /** + * Whether the minute field has a value + * @return + */ + protected boolean minFiledIsSetAll(){ + FieldExpression minFieldExpression = minField.getExpression(); + return (minFieldExpression instanceof Every || minFieldExpression instanceof Always + || minFieldExpression instanceof Between || minFieldExpression instanceof And + || minFieldExpression instanceof On); + } + + + /** + * Whether the minute field has a value of every or always + * @return + */ + protected boolean minFiledIsEvery(){ + FieldExpression minFieldExpression = minField.getExpression(); + return (minFieldExpression instanceof Every || minFieldExpression instanceof Always); + } + + /** + * Whether the hour field has a value + * @return + */ + protected boolean hourFiledIsSetAll(){ + 
FieldExpression hourFieldExpression = hourField.getExpression(); + return (hourFieldExpression instanceof Every || hourFieldExpression instanceof Always + || hourFieldExpression instanceof Between || hourFieldExpression instanceof And + || hourFieldExpression instanceof On); + } + + /** + * Whether the hour field has a value of every or always + * @return + */ + protected boolean hourFiledIsEvery(){ + FieldExpression hourFieldExpression = hourField.getExpression(); + return (hourFieldExpression instanceof Every || hourFieldExpression instanceof Always); + } + + /** + * Whether the day Of month field has a value + * @return + */ + protected boolean dayOfMonthFieldIsSetAll(){ + return (dayOfMonthField.getExpression() instanceof Every || dayOfMonthField.getExpression() instanceof Always + || dayOfMonthField.getExpression() instanceof Between || dayOfMonthField.getExpression() instanceof And + || dayOfMonthField.getExpression() instanceof On); + } + + + /** + * Whether the day Of Month field has a value of every or always + * @return + */ + protected boolean dayOfMonthFieldIsEvery(){ + return (dayOfMonthField.getExpression() instanceof Every || dayOfMonthField.getExpression() instanceof Always); + } + + /** + * Whether month field has a value + * @return + */ + protected boolean monthFieldIsSetAll(){ + FieldExpression monthFieldExpression = monthField.getExpression(); + return (monthFieldExpression instanceof Every || monthFieldExpression instanceof Always + || monthFieldExpression instanceof Between || monthFieldExpression instanceof And + || monthFieldExpression instanceof On); + } + + /** + * Whether the month field has a value of every or always + * @return + */ + protected boolean monthFieldIsEvery(){ + FieldExpression monthFieldExpression = monthField.getExpression(); + return (monthFieldExpression instanceof Every || monthFieldExpression instanceof Always); + } + + /** + * Whether the day Of week field has a value + * @return + */ + protected boolean 
dayofWeekFieldIsSetAll(){ + FieldExpression dayOfWeekFieldExpression = dayOfWeekField.getExpression(); + return (dayOfWeekFieldExpression instanceof Every || dayOfWeekFieldExpression instanceof Always + || dayOfWeekFieldExpression instanceof Between || dayOfWeekFieldExpression instanceof And + || dayOfWeekFieldExpression instanceof On); + } + + /** + * Whether the day Of week field has a value of every or always + * @return + */ + protected boolean dayofWeekFieldIsEvery(){ + FieldExpression dayOfWeekFieldExpression = dayOfWeekField.getExpression(); + return (dayOfWeekFieldExpression instanceof Every || dayOfWeekFieldExpression instanceof Always); + } + /** + * get cycle enum + * + * @return + */ + protected abstract CycleEnum getCycle(); + + /** + * get mini level cycle enum + * + * @return + */ + protected abstract CycleEnum getMiniCycle(); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CronUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CronUtils.java new file mode 100644 index 0000000000..be5dfd0396 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CronUtils.java @@ -0,0 +1,207 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.utils.cron; + + +import org.apache.dolphinscheduler.common.enums.CycleEnum; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import com.cronutils.model.Cron; +import com.cronutils.model.definition.CronDefinitionBuilder; +import com.cronutils.parser.CronParser; +import org.quartz.CronExpression; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.ParseException; +import java.util.*; + +import static com.cronutils.model.CronType.QUARTZ; +import static org.apache.dolphinscheduler.dao.utils.cron.CycleFactory.*; + + +/** + * cron utils + */ +public class CronUtils { + + private static final Logger logger = LoggerFactory.getLogger(CronUtils.class); + + + private static final CronParser QUARTZ_CRON_PARSER = new CronParser(CronDefinitionBuilder.instanceDefinitionFor(QUARTZ)); + + /** + * Parse string with cron expression to Cron + * + * @param cronExpression + * - cron expression, never null + * @return Cron instance, corresponding to cron expression received + * @throws java.lang.IllegalArgumentException + * if expression does not match cron definition + */ + public static Cron parse2Cron(String cronExpression) { + return QUARTZ_CRON_PARSER.parse(cronExpression); + } + + /** + * build a new CronExpression based on the string cronExpression. 
+ * + * @param cronExpression String representation of the cron expression the + * new object should represent + * @throws java.text.ParseException + * if the string expression cannot be parsed into a valid + * CronExpression + */ + public static CronExpression parse2CronExpression(String cronExpression) throws ParseException { + return new CronExpression(cronExpression); + } + + /** + * get cycle enum + * @param cron + * @return + */ + public static CycleEnum getMaxCycle(Cron cron) { + return min(cron).addCycle(hour(cron)).addCycle(day(cron)).addCycle(week(cron)).addCycle(month(cron)).getCycle(); + } + + /** + * get cycle enum + * @param cron + * @return + */ + public static CycleEnum getMiniCycle(Cron cron) { + return min(cron).addCycle(hour(cron)).addCycle(day(cron)).addCycle(week(cron)).addCycle(month(cron)).getMiniCycle(); + } + + /** + * get mini level of cycle enum + * + * @param crontab + * @return + */ + public static CycleEnum getMiniCycle(String crontab) { + return getMiniCycle(parse2Cron(crontab)); + } + + /** + * get cycle enum + * + * @param crontab + * @return + */ + public static CycleEnum getMaxCycle(String crontab) { + return getMaxCycle(parse2Cron(crontab)); + } + + /** + * gets all scheduled times for a period of time based on not self dependency + * @param startTime + * @param endTime + * @param cronExpression + * @return + */ + public static List getFireDateList(Date startTime, Date endTime, CronExpression cronExpression) { + List dateList = new ArrayList<>(); + + while (Stopper.isRunning()) { + startTime = cronExpression.getNextValidTimeAfter(startTime); + if (startTime.after(endTime)) { + break; + } + dateList.add(startTime); + } + + return dateList; + } + + /** + * gets all scheduled times for a period of time based on self dependency + * @param startTime + * @param endTime + * @param cronExpression + * @return + */ + public static List getSelfFireDateList(Date startTime, Date endTime, CronExpression cronExpression) { + List dateList = new 
ArrayList<>(); + + while (Stopper.isRunning()) { + startTime = cronExpression.getNextValidTimeAfter(startTime); + if (startTime.after(endTime) || startTime.equals(endTime)) { + break; + } + dateList.add(startTime); + } + + return dateList; + } + + + /** + * get expiration time + * @param startTime + * @param cycleEnum + * @return + */ + public static Date getExpirationTime(Date startTime, CycleEnum cycleEnum) { + Date maxExpirationTime = null; + Date startTimeMax = null; + try { + startTimeMax = getEndTime(startTime); + + Calendar calendar = Calendar.getInstance(); + calendar.setTime(startTime); + switch (cycleEnum) { + case HOUR: + calendar.add(Calendar.HOUR, 1); + break; + case DAY: + calendar.add(Calendar.DATE, 1); + break; + case WEEK: + calendar.add(Calendar.DATE, 1); + break; + case MONTH: + calendar.add(Calendar.DATE, 1); + break; + default: + logger.error("Dependent process definition's cycleEnum is {},not support!!", cycleEnum.name()); + break; + } + maxExpirationTime = calendar.getTime(); + } catch (Exception e) { + logger.error(e.getMessage(),e); + } + return DateUtils.compare(startTimeMax,maxExpirationTime)?maxExpirationTime:startTimeMax; + } + + /** + * get the end time of the day by value of date + * @param date + * @return + */ + private static Date getEndTime(Date date) { + Calendar end = new GregorianCalendar(); + end.setTime(date); + end.set(Calendar.HOUR_OF_DAY,23); + end.set(Calendar.MINUTE,59); + end.set(Calendar.SECOND,59); + end.set(Calendar.MILLISECOND,999); + return end.getTime(); + } + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleFactory.java new file mode 100644 index 0000000000..3e5013edeb --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleFactory.java @@ -0,0 +1,211 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one 
or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.utils.cron; + +import org.apache.dolphinscheduler.common.enums.CycleEnum; +import com.cronutils.model.Cron; +import com.cronutils.model.field.expression.Always; +import com.cronutils.model.field.expression.QuestionMark; + +/** + * Crontab Cycle Tool Factory + */ +public class CycleFactory { + + public static AbstractCycle min(Cron cron) { + return new MinCycle(cron); + } + + public static AbstractCycle hour(Cron cron) { + return new HourCycle(cron); + } + + public static AbstractCycle day(Cron cron) { + return new DayCycle(cron); + } + + public static AbstractCycle week(Cron cron) { + return new WeekCycle(cron); + } + + public static AbstractCycle month(Cron cron) { + return new MonthCycle(cron); + } + + /** + * day cycle + */ + public static class DayCycle extends AbstractCycle { + + public DayCycle(Cron cron) { + super(cron); + } + + @Override + protected CycleEnum getCycle() { + + if (minFiledIsSetAll() + && hourFiledIsSetAll() + && dayOfMonthFieldIsEvery() + && dayOfWeekField.getExpression() instanceof QuestionMark + && monthField.getExpression() instanceof Always) { + return CycleEnum.DAY; + } + + return null; + } + + @Override + protected CycleEnum getMiniCycle() { + if 
(dayOfMonthFieldIsEvery()) { + return CycleEnum.DAY; + } + + return null; + } + } + + /** + * hour cycle + */ + public static class HourCycle extends AbstractCycle { + + public HourCycle(Cron cron) { + super(cron); + } + + @Override + protected CycleEnum getCycle() { + if (minFiledIsSetAll() + && hourFiledIsEvery() + && dayOfMonthField.getExpression() instanceof Always + && dayOfWeekField.getExpression() instanceof QuestionMark + && monthField.getExpression() instanceof Always) { + return CycleEnum.HOUR; + } + + return null; + } + + @Override + protected CycleEnum getMiniCycle() { + if(hourFiledIsEvery()){ + return CycleEnum.HOUR; + } + return null; + } + } + + /** + * minute cycle + */ + public static class MinCycle extends AbstractCycle { + + public MinCycle(Cron cron) { + super(cron); + } + + @Override + protected CycleEnum getCycle() { + if (minFiledIsEvery() + && hourField.getExpression() instanceof Always + && dayOfMonthField.getExpression() instanceof Always + && monthField.getExpression() instanceof Always) { + return CycleEnum.MINUTE; + } + + return null; + } + + @Override + protected CycleEnum getMiniCycle() { + if(minFiledIsEvery()){ + return CycleEnum.MINUTE; + } + return null; + } + } + + /** + * month cycle + */ + public static class MonthCycle extends AbstractCycle { + + public MonthCycle(Cron cron) { + super(cron); + } + + @Override + protected CycleEnum getCycle() { + boolean flag = (minFiledIsSetAll() + && hourFiledIsSetAll() + && dayOfMonthFieldIsSetAll() + && dayOfWeekField.getExpression() instanceof QuestionMark + && monthFieldIsEvery()) || + (minFiledIsSetAll() + && hourFiledIsSetAll() + && dayOfMonthField.getExpression() instanceof QuestionMark + && dayofWeekFieldIsSetAll() + && monthFieldIsEvery()); + if (flag) { + return CycleEnum.MONTH; + } + + return null; + } + + @Override + protected CycleEnum getMiniCycle() { + if (monthFieldIsEvery()) { + return CycleEnum.MONTH; + } + + return null; + } + } + + /** + * week cycle + */ + public static 
class WeekCycle extends AbstractCycle { + public WeekCycle(Cron cron) { + super(cron); + } + + @Override + protected CycleEnum getCycle() { + if (minFiledIsSetAll() + && hourFiledIsSetAll() + && dayOfMonthField.getExpression() instanceof QuestionMark + && dayofWeekFieldIsEvery() + && monthField.getExpression() instanceof Always) { + return CycleEnum.WEEK; + } + + return null; + } + + @Override + protected CycleEnum getMiniCycle() { + if (dayofWeekFieldIsEvery()) { + return CycleEnum.WEEK; + } + + return null; + } + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleLinks.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleLinks.java new file mode 100644 index 0000000000..f4e08cefc9 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleLinks.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.utils.cron; + +import org.apache.dolphinscheduler.common.enums.CycleEnum; +import com.cronutils.model.Cron; + +import java.util.ArrayList; +import java.util.List; + +/** + * 链接判断工具 + */ +public class CycleLinks extends AbstractCycle { + private final List cycleList = new ArrayList<>(); + + public CycleLinks(Cron cron) { + super(cron); + } + + @Override + public CycleLinks addCycle(AbstractCycle cycle) { + cycleList.add(cycle); + return this; + } + + @Override + protected CycleEnum getCycle() { + for (AbstractCycle abstractCycle : cycleList) { + CycleEnum cycle = abstractCycle.getCycle(); + if (cycle != null) { + return cycle; + } + } + + return null; + } + + @Override + protected CycleEnum getMiniCycle() { + for (AbstractCycle cycleHelper : cycleList) { + CycleEnum cycle = cycleHelper.getMiniCycle(); + if (cycle != null) { + return cycle; + } + } + + return null; + } +} \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/application.yml b/dolphinscheduler-dao/src/main/resources/application.yml similarity index 100% rename from escheduler-dao/src/main/resources/application.yml rename to dolphinscheduler-dao/src/main/resources/application.yml diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AccessTokenMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AccessTokenMapper.xml new file mode 100644 index 0000000000..15dceb7004 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AccessTokenMapper.xml @@ -0,0 +1,16 @@ + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertGroupMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertGroupMapper.xml new file mode 100644 index 0000000000..347cef0022 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertGroupMapper.xml @@ -0,0 +1,30 @@ + + + + + 
+ + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertMapper.xml new file mode 100644 index 0000000000..4c38335992 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertMapper.xml @@ -0,0 +1,9 @@ + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/CommandMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/CommandMapper.xml new file mode 100644 index 0000000000..b1455412e7 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/CommandMapper.xml @@ -0,0 +1,26 @@ + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceMapper.xml new file mode 100644 index 0000000000..101313cdd7 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceMapper.xml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceUserMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceUserMapper.xml new file mode 100644 index 0000000000..0178b36acf --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceUserMapper.xml @@ -0,0 +1,13 @@ + + + + + delete from t_escheduler_datasource_user + where user_id = #{userId} + + + + delete from t_escheduler_datasource_user + where datasource_id = #{datasourceId} + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ErrorCommandMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ErrorCommandMapper.xml new file mode 
100644 index 0000000000..278b7cbc08 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ErrorCommandMapper.xml @@ -0,0 +1,19 @@ + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessDefinitionMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessDefinitionMapper.xml new file mode 100644 index 0000000000..e16ae81ea1 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessDefinitionMapper.xml @@ -0,0 +1,56 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapMapper.xml new file mode 100644 index 0000000000..7bffa89941 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapMapper.xml @@ -0,0 +1,26 @@ + + + + + delete + from t_escheduler_relation_process_instance + where parent_process_instance_id=#{parentProcessId} + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapper.xml new file mode 100644 index 0000000000..f956c148b7 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapper.xml @@ -0,0 +1,123 @@ + + + + + + + + update t_escheduler_process_instance + set host=null + where host =#{host} and state in + + #{i} + + + + update t_escheduler_process_instance + set state = #{destState} + where state = #{originState} + + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectMapper.xml new file 
mode 100644 index 0000000000..e34576811d --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectMapper.xml @@ -0,0 +1,65 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectUserMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectUserMapper.xml new file mode 100644 index 0000000000..ad2b9b9343 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectUserMapper.xml @@ -0,0 +1,19 @@ + + + + + delete from t_escheduler_relation_project_user + where 1=1 + and user_id = #{userId} + + and project_id = #{projectId} + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/QueueMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/QueueMapper.xml new file mode 100644 index 0000000000..6eac1d3bc8 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/QueueMapper.xml @@ -0,0 +1,25 @@ + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceMapper.xml new file mode 100644 index 0000000000..acd8376be8 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceMapper.xml @@ -0,0 +1,57 @@ + + + + + + + + + + diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceUserMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceUserMapper.xml new file mode 100644 index 0000000000..deeadac8d3 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceUserMapper.xml @@ -0,0 +1,15 @@ + + + + + delete + from t_escheduler_relation_resources_user + where 1 = 1 + + and user_id = #{userId} + + + and resources_id = 
#{resourceId} + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ScheduleMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ScheduleMapper.xml new file mode 100644 index 0000000000..51edd4aad9 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ScheduleMapper.xml @@ -0,0 +1,41 @@ + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/SessionMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/SessionMapper.xml new file mode 100644 index 0000000000..e8fd7f3eb3 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/SessionMapper.xml @@ -0,0 +1,9 @@ + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TaskInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TaskInstanceMapper.xml new file mode 100644 index 0000000000..d60153e1de --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TaskInstanceMapper.xml @@ -0,0 +1,96 @@ + + + + + update t_escheduler_task_instance + set state = #{destStatus} + where host = #{host} and state in (#{states}) + + + + + + + + + + diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TenantMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TenantMapper.xml new file mode 100644 index 0000000000..91fede5406 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TenantMapper.xml @@ -0,0 +1,24 @@ + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UDFUserMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UDFUserMapper.xml new file mode 100644 index 0000000000..a47c3e4b67 --- /dev/null +++ 
b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UDFUserMapper.xml @@ -0,0 +1,12 @@ + + + + + delete from t_escheduler_relation_udfs_user + where `user_id` = #{userId} + + + delete from t_escheduler_relation_udfs_user + where `udf_id` = #{udfFuncId} + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UdfFuncMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UdfFuncMapper.xml new file mode 100644 index 0000000000..c0aed4fc76 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UdfFuncMapper.xml @@ -0,0 +1,51 @@ + + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserAlertGroupMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserAlertGroupMapper.xml new file mode 100644 index 0000000000..090388dedf --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserAlertGroupMapper.xml @@ -0,0 +1,20 @@ + + + + + delete from t_escheduler_relation_user_alertgroup + where `alertgroup_id` = #{alertgroupId} + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserMapper.xml new file mode 100644 index 0000000000..39dd9871bc --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserMapper.xml @@ -0,0 +1,55 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/WorkerGroupMapper.xml b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/WorkerGroupMapper.xml new file mode 100644 index 0000000000..0bc7b3a891 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/cn.escheduler.dao.mapper/WorkerGroupMapper.xml @@ -0,0 +1,23 @@ + + + + + + + \ No 
newline at end of file diff --git a/escheduler-dao/src/main/resources/dao/data_source.properties__ b/dolphinscheduler-dao/src/main/resources/dao/data_source.properties__ similarity index 100% rename from escheduler-dao/src/main/resources/dao/data_source.properties__ rename to dolphinscheduler-dao/src/main/resources/dao/data_source.properties__ diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/cron/CronUtilsTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/cron/CronUtilsTest.java new file mode 100644 index 0000000000..86227e348f --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/cron/CronUtilsTest.java @@ -0,0 +1,181 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.cron; + +import org.apache.dolphinscheduler.common.enums.CycleEnum; +import org.apache.dolphinscheduler.dao.utils.cron.CronUtils; +import com.cronutils.builder.CronBuilder; +import com.cronutils.model.Cron; +import com.cronutils.model.CronType; +import com.cronutils.model.definition.CronDefinitionBuilder; +import com.cronutils.model.field.CronField; +import com.cronutils.model.field.CronFieldName; +import com.cronutils.model.field.expression.*; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.ParseException; + +import static com.cronutils.model.field.expression.FieldExpressionFactory.*; + +/** + */ +public class CronUtilsTest { + + private static final Logger logger = LoggerFactory.getLogger(CronUtilsTest.class); + + @Test + public void cronAsStringTest() { + Cron cron = CronBuilder.cron(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ)) + .withYear(always()) + .withDoW(questionMark()) + .withMonth(always()) + .withDoM(always()) + .withHour(always()) + .withMinute(every(5)) + .withSecond(on(0)) + .instance(); + // Obtain the string expression + String cronAsString = cron.asString(); // 0 */5 * * * ? * Every five minutes(每5分钟一次) + + Assert.assertEquals(cronAsString, "0 */5 * * * ? *"); + + } + + + @Test + public void testParse() throws ParseException { + String strCrontab = "0 1 2 3 * ? 
*"; + Cron depCron = CronUtils.parse2Cron(strCrontab); + Assert.assertEquals(depCron.retrieve(CronFieldName.SECOND).getExpression().asString(), "0"); + Assert.assertEquals(depCron.retrieve(CronFieldName.MINUTE).getExpression().asString(), "1"); + Assert.assertEquals(depCron.retrieve(CronFieldName.HOUR).getExpression().asString(), "2"); + Assert.assertEquals(depCron.retrieve(CronFieldName.DAY_OF_MONTH).getExpression().asString(), "3"); + Assert.assertEquals(depCron.retrieve(CronFieldName.MONTH).getExpression().asString(), "*"); + Assert.assertEquals(depCron.retrieve(CronFieldName.YEAR).getExpression().asString(), "*"); + } + + @Test + public void testParse1() throws ParseException { + String strCrontab = "* * 0/1 * * ? *"; + strCrontab = "0/50 0/59 * * * ? *"; + strCrontab = "3/5 * 0/5 * * ? *"; + strCrontab = "1/5 3/5 1/5 3/30 * ? *"; + Cron depCron = CronUtils.parse2Cron(strCrontab); + logger.info(depCron.validate().asString()); + } + + @Test + public void scheduleTypeTest() throws ParseException { + + CycleEnum cycleEnum = CronUtils.getMaxCycle("0 */1 * * * ? *"); + Assert.assertEquals(cycleEnum.name(), "MINUTE"); + + CycleEnum cycleEnum2 = CronUtils.getMaxCycle("0 * * * * ? *"); + Assert.assertEquals(cycleEnum2.name(), "MINUTE"); + } + + @Test + public void test2(){ + Cron cron1 = CronBuilder.cron(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ)) + .withYear(always()) + .withDoW(questionMark()) + .withMonth(always()) + .withDoM(always()) + .withHour(always()) + .withMinute(every(5)) + .withSecond(on(0)) + .instance(); + + String cronAsString = cron1.asString(); // 0 */5 * * * ? * 每5分钟一次 + //logger.info(cronAsString); + // Obtain the string expression + //String minCrontab = "0 0 * * * ? *"; + //String minCrontab = "0 0 10,14,16 * * ?"; + //String minCrontab = "0 0-5 14 * * ? *"; + //String minCrontab = "0 0 2 ? * SUN *"; + //String minCrontab = "* 0,3 2 SUN * 1#1 *"; + //String minCrontab = "* 0,3 * 1W * ? 
*"; + //cron = CronUtils.parse2Cron("0 * * * * ? *"); + // 月份周期 + /*String[] cronArayy = new String[]{"* 0,3 * 1W * ? *","* 0 0 1W * ? *", + "0 0 0 L 3/5 ? *","0 0 0 ? 3/5 2/2 *"};*/ + // 分钟周期 + String[] cronArayy = new String[]{"* * * * * ? *","* 0 * * * ? *", + "* 5 * * 3/5 ? *","0 0 * * * ? *"}; + // 周周期 + /*String[] cronArayy = new String[]{"* * * ? * 2/1 *","0 *//*5 * ? * 2/1 *", + "* * *//*5 ? * 2/1 *"};*/ + for(String minCrontab:cronArayy){ + if (!org.quartz.CronExpression.isValidExpression(minCrontab)) { + throw new RuntimeException(minCrontab+" verify failure, cron expression not valid"); + } + Cron cron = CronUtils.parse2Cron(minCrontab); + CronField minField = cron.retrieve(CronFieldName.MINUTE); + logger.info("minField instanceof Between:"+(minField.getExpression() instanceof Between)); + logger.info("minField instanceof Every:"+(minField.getExpression() instanceof Every)); + logger.info("minField instanceof Always:" + (minField.getExpression() instanceof Always)); + logger.info("minField instanceof On:"+(minField.getExpression() instanceof On)); + logger.info("minField instanceof And:"+(minField.getExpression() instanceof And)); + CronField hourField = cron.retrieve(CronFieldName.HOUR); + logger.info("hourField instanceof Between:"+(hourField.getExpression() instanceof Between)); + logger.info("hourField instanceof Always:"+(hourField.getExpression() instanceof Always)); + logger.info("hourField instanceof Every:"+(hourField.getExpression() instanceof Every)); + logger.info("hourField instanceof On:"+(hourField.getExpression() instanceof On)); + logger.info("hourField instanceof And:"+(hourField.getExpression() instanceof And)); + + CronField dayOfMonthField = cron.retrieve(CronFieldName.DAY_OF_MONTH); + logger.info("dayOfMonthField instanceof Between:"+(dayOfMonthField.getExpression() instanceof Between)); + logger.info("dayOfMonthField instanceof Always:"+(dayOfMonthField.getExpression() instanceof Always)); + logger.info("dayOfMonthField instanceof 
Every:"+(dayOfMonthField.getExpression() instanceof Every)); + logger.info("dayOfMonthField instanceof On:"+(dayOfMonthField.getExpression() instanceof On)); + logger.info("dayOfMonthField instanceof And:"+(dayOfMonthField.getExpression() instanceof And)); + logger.info("dayOfMonthField instanceof QuestionMark:"+(dayOfMonthField.getExpression() instanceof QuestionMark)); + + CronField monthField = cron.retrieve(CronFieldName.MONTH); + logger.info("monthField instanceof Between:"+(monthField.getExpression() instanceof Between)); + logger.info("monthField instanceof Always:"+(monthField.getExpression() instanceof Always)); + logger.info("monthField instanceof Every:"+(monthField.getExpression() instanceof Every)); + logger.info("monthField instanceof On:"+(monthField.getExpression() instanceof On)); + logger.info("monthField instanceof And:"+(monthField.getExpression() instanceof And)); + logger.info("monthField instanceof QuestionMark:"+(monthField.getExpression() instanceof QuestionMark)); + + CronField dayOfWeekField = cron.retrieve(CronFieldName.DAY_OF_WEEK); + logger.info("dayOfWeekField instanceof Between:"+(dayOfWeekField.getExpression() instanceof Between)); + logger.info("dayOfWeekField instanceof Always:"+(dayOfWeekField.getExpression() instanceof Always)); + logger.info("dayOfWeekField instanceof Every:"+(dayOfWeekField.getExpression() instanceof Every)); + logger.info("dayOfWeekField instanceof On:"+(dayOfWeekField.getExpression() instanceof On)); + logger.info("dayOfWeekField instanceof And:"+(dayOfWeekField.getExpression() instanceof And)); + logger.info("dayOfWeekField instanceof QuestionMark:"+(dayOfWeekField.getExpression() instanceof QuestionMark)); + + CronField yearField = cron.retrieve(CronFieldName.YEAR); + + //CycleEnum cycleEnum = CronUtils.getMaxCycle("0 * * * * ? 
*"); + CycleEnum cycleEnum = CronUtils.getMaxCycle(minCrontab); + if(cycleEnum !=null){ + logger.info(cycleEnum.name()); + }else{ + logger.info("can't get scheduleType"); + } + } + + + + } +} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapperTest.java new file mode 100644 index 0000000000..caa96dfde1 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapperTest.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.AccessToken; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import javax.annotation.Resource; +import java.util.Date; +import java.util.List; + + +@RunWith(SpringRunner.class) +@SpringBootTest +public class AccessTokenMapperTest { + + + @Resource + AccessTokenMapper accessTokenMapper; + + + private AccessToken insertOne(){ + //insertOne + AccessToken accessToken = new AccessToken(); + accessToken.setUserId(4); + accessToken.setToken("hello, access token"); + accessToken.setCreateTime(new Date()); + accessToken.setUpdateTime(new Date()); + accessToken.setExpireTime(new Date()); + accessTokenMapper.insert(accessToken); + return accessToken; + } + + @Test + public void testUpdate(){ + //insertOne + AccessToken accessToken = insertOne(); + //update + accessToken.setToken("hello, token"); + int update = accessTokenMapper.updateById(accessToken); + Assert.assertEquals(update, 1); + accessTokenMapper.deleteById(accessToken.getId()); + } + + @Test + public void testDelete(){ + + AccessToken accessToken = insertOne(); + int delete = accessTokenMapper.deleteById(accessToken.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery(){ + + AccessToken accessToken = insertOne(); + //query + List token = accessTokenMapper.selectList(null); + Assert.assertNotEquals(token.size(), 0); + accessTokenMapper.deleteById(accessToken.getId()); + } + + @Test + public void selectAccessTokenPage() { + AccessToken accessToken = insertOne(); + Page page = new Page(1, 3); + String userName = ""; + IPage accessTokenPage = accessTokenMapper.selectAccessTokenPage(page, userName, 4); + 
Assert.assertNotEquals(accessTokenPage.getTotal(), 0); + accessTokenMapper.deleteById(accessToken.getId()); + } + + +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapperTest.java new file mode 100644 index 0000000000..e76de96035 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapperTest.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.dao.entity.AlertGroup; +import org.apache.dolphinscheduler.dao.entity.UserAlertGroup; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class AlertGroupMapperTest { + + + @Autowired + AlertGroupMapper alertGroupMapper; + + @Autowired + UserAlertGroupMapper userAlertGroupMapper; + + private AlertGroup insertOne(){ + //insertOne + AlertGroup alertGroup = new AlertGroup(); + alertGroup.setGroupName("alert group 1"); + alertGroup.setDescription("alert test1"); + alertGroup.setGroupType(AlertType.EMAIL); + + alertGroup.setCreateTime(new Date()); + alertGroup.setUpdateTime(new Date()); + alertGroupMapper.insert(alertGroup); + return alertGroup; + } + + @Test + public void testUpdate(){ + //insertOne + AlertGroup alertGroup = insertOne(); + //update + alertGroup.setDescription("hello, ag"); + int update = alertGroupMapper.updateById(alertGroup); + Assert.assertEquals(update, 1); + alertGroupMapper.deleteById(alertGroup.getId()); + } + + @Test + public void testDelete(){ + + AlertGroup alertGroup = insertOne(); + int delete = alertGroupMapper.deleteById(alertGroup.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + AlertGroup alertGroup = insertOne(); + //query + List alertGroups = alertGroupMapper.selectList(null); + Assert.assertNotEquals(alertGroups.size(), 0); + alertGroupMapper.deleteById(alertGroup.getId()); + } + + + @Test + public void 
testQueryAlertGroupPage() { + AlertGroup alertGroup = insertOne(); + Page page = new Page(1, 3); + IPage accessTokenPage = alertGroupMapper.queryAlertGroupPage(page, + "alert" ); + Assert.assertNotEquals(accessTokenPage.getTotal(), 0); + alertGroupMapper.deleteById(alertGroup.getId()); + } + + @Test + public void testQueryByGroupName() { + + AlertGroup alertGroup = insertOne(); + List alertGroups = alertGroupMapper.queryByGroupName("alert group 1"); + Assert.assertNotEquals(alertGroups.size(), 0); + alertGroupMapper.deleteById(alertGroup.getId()); + } + + @Test + public void testQueryByUserId() { + AlertGroup alertGroup = insertOne(); + UserAlertGroup userAlertGroup = new UserAlertGroup(); + userAlertGroup.setAlertgroupId(alertGroup.getId()); + userAlertGroup.setUserId(4); + userAlertGroupMapper.insert(userAlertGroup); + List alertGroups = alertGroupMapper.queryByUserId(4); + Assert.assertNotEquals(alertGroups.size(), 0); + alertGroupMapper.deleteById(alertGroup.getId()); + userAlertGroupMapper.deleteById(userAlertGroup.getId()); + } + + @Test + public void testQueryByAlertType() { + AlertGroup alertGroup = insertOne(); + List alertGroups = alertGroupMapper.queryByAlertType(AlertType.EMAIL); + Assert.assertNotEquals(alertGroups.size(), 0); + alertGroupMapper.deleteById(alertGroup.getId()); + } + + @Test + public void testQueryAllGroupList() { + AlertGroup alertGroup = insertOne(); + List alertGroups = alertGroupMapper.queryAllGroupList(); + Assert.assertNotEquals(alertGroups.size(), 0); + alertGroupMapper.deleteById(alertGroup.getId()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertMapperTest.java new file mode 100644 index 0000000000..becf68017e --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertMapperTest.java @@ -0,0 +1,89 @@ +/* + * 
Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.dao.entity.Alert; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + + + +@RunWith(SpringRunner.class) +@SpringBootTest +public class AlertMapperTest { + + @Autowired + AlertMapper alertMapper; + + private Alert insertOne(){ + //insertOne + Alert alert = new Alert(); + alert.setLog("success"); + alert.setReceivers("xx@aa.com"); + alert.setAlertGroupId(1); + alert.setAlertStatus(AlertStatus.EXECUTION_SUCCESS); + alert.setCreateTime(new Date()); + alert.setUpdateTime(new Date()); + alertMapper.insert(alert); + return alert; + } + + @Test + public void testUpdate(){ + //insertOne + Alert alert = insertOne(); + //update + alert.setTitle("hello"); + int update = alertMapper.updateById(alert); + Assert.assertEquals(update, 1); + alertMapper.deleteById(alert.getId()); + 
} + + @Test + public void testDelete(){ + + Alert alert = insertOne(); + int delete = alertMapper.deleteById(alert.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + Alert alert = insertOne(); + //query + List alerts = alertMapper.selectList(null); + Assert.assertNotEquals(alerts.size(), 0); + alertMapper.deleteById(alert.getId()); + } + + @Test + public void testListAlertByStatus() { + Alert alert = insertOne(); + //query + List alerts = alertMapper.listAlertByStatus(AlertStatus.EXECUTION_SUCCESS); + Assert.assertNotEquals(alerts.size(), 0); + alertMapper.deleteById(alert.getId()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java new file mode 100644 index 0000000000..2681183709 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java @@ -0,0 +1,153 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.CommandCount; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.common.enums.*; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class CommandMapperTest { + + + @Autowired + CommandMapper commandMapper; + + @Autowired + ProcessDefinitionMapper processDefinitionMapper; + + private Command insertOne(){ + //insertOne + Command command = new Command(); + command.setCommandType(CommandType.START_PROCESS); + command.setProcessDefinitionId(1); + command.setExecutorId(4); + command.setProcessInstancePriority(Priority.MEDIUM); + command.setFailureStrategy(FailureStrategy.CONTINUE); + command.setWorkerGroupId(-1); + command.setWarningGroupId(1); + command.setUpdateTime(new Date()); + commandMapper.insert(command); + return command; + } + + @Test + public void testUpdate(){ + //insertOne + Command command = insertOne(); + //update + command.setStartTime(new Date()); + int update = commandMapper.updateById(command); + Assert.assertEquals(update, 1); + commandMapper.deleteById(command.getId()); + } + + @Test + public void testDelete(){ + + Command Command = insertOne(); + int delete = commandMapper.deleteById(Command.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + Command command = insertOne(); + //query + List commands = commandMapper.selectList(null); + Assert.assertNotEquals(commands.size(), 0); + commandMapper.deleteById(command.getId()); + } + @Test + public void testGetAll() { + Command command = 
insertOne(); + List commands = commandMapper.selectList(null); + Assert.assertNotEquals(commands.size(), 0); + commandMapper.deleteById(command.getId()); + } + + @Test + public void testGetOneToRun() { + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setReleaseState(ReleaseState.ONLINE); + processDefinition.setName("ut test"); + processDefinition.setProjectId(1); + processDefinition.setFlag(Flag.YES); + processDefinitionMapper.insert(processDefinition); + + Command command = new Command(); + command.setCommandType(CommandType.START_PROCESS); + command.setProcessDefinitionId(processDefinition.getId()); + command.setExecutorId(4); + command.setProcessInstancePriority(Priority.MEDIUM); + command.setFailureStrategy(FailureStrategy.CONTINUE); + command.setWorkerGroupId(-1); + command.setWarningGroupId(1); + command.setUpdateTime(new Date()); + commandMapper.insert(command); + + Command command2 = commandMapper.getOneToRun(); + Assert.assertNotEquals(command2, null); + commandMapper.deleteById(command.getId()); + processDefinitionMapper.deleteById(processDefinition.getId()); + } + + @Test + public void testCountCommandState() { + Command command = insertOne(); + + //insertOne + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setName("def 1"); + processDefinition.setProjectId(1010); + processDefinition.setUserId(101); + processDefinition.setUpdateTime(new Date()); + processDefinition.setCreateTime(new Date()); + processDefinitionMapper.insert(processDefinition); + + command.setProcessDefinitionId(processDefinition.getId()); + commandMapper.updateById(command); + + + List commandCounts = commandMapper.countCommandState( + 4, null, null, new Integer[0] + ); + + Integer[] projectIdArray = new Integer[2]; + projectIdArray[0] = processDefinition.getProjectId(); + projectIdArray[1] = 200; + List commandCounts2 = commandMapper.countCommandState( + 4, null, null, projectIdArray + ); + + 
commandMapper.deleteById(command.getId()); + processDefinitionMapper.deleteById(processDefinition.getId()); + Assert.assertNotEquals(commandCounts.size(), 0); + Assert.assertNotEquals(commandCounts2.size(), 0); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java new file mode 100644 index 0000000000..1aeccaadf4 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.DatasourceUser; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class DataSourceMapperTest { + + @Autowired + DataSourceMapper dataSourceMapper; + + @Autowired + DataSourceUserMapper dataSourceUserMapper; + + private DataSource insertOne(){ + //insertOne + DataSource dataSource = new DataSource(); + dataSource.setUserId(4); + dataSource.setName("data source test"); + dataSource.setType(DbType.MYSQL); + dataSource.setNote("mysql test"); + dataSource.setConnectionParams("hello mysql"); + dataSource.setUpdateTime(new Date()); + dataSource.setCreateTime(new Date()); + dataSourceMapper.insert(dataSource); + return dataSource; + } + + @Test + public void testUpdate(){ + //insertOne + DataSource dataSource = insertOne(); + //update + dataSource.setUpdateTime(new Date()); + int update = dataSourceMapper.updateById(dataSource); + Assert.assertEquals(update, 1); + dataSourceMapper.deleteById(dataSource.getId()); + } + + @Test + public void testDelete(){ + + DataSource dataSource = insertOne(); + int delete = dataSourceMapper.deleteById(dataSource.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + DataSource dataSource = insertOne(); + //query + List dataSources = dataSourceMapper.selectList(null); + Assert.assertNotEquals(dataSources.size(), 0); + 
dataSourceMapper.deleteById(dataSource.getId()); + } + + @Test + public void testQueryDataSourceByType() { + DataSource dataSource = insertOne(); + //query + List dataSources = dataSourceMapper.queryDataSourceByType( + 0, DbType.MYSQL.ordinal() + ); + Assert.assertNotEquals(dataSources.size(), 0); + dataSourceMapper.deleteById(dataSource.getId()); + } + + @Test + public void testSelectPaging() { + DataSource dataSource = insertOne(); + Page page = new Page(1, 3); + IPage dataSourceIPage = dataSourceMapper.selectPaging(page, + 4, null); + Assert.assertNotEquals(dataSourceIPage.getTotal(), 0); + dataSourceMapper.deleteById(dataSource.getId()); + } + + @Test + public void testQueryDataSourceByName() { + DataSource dataSource = insertOne(); + List dataSources = dataSourceMapper.queryDataSourceByName("data source test"); + Assert.assertNotEquals(dataSources.size(), 0); + dataSourceMapper.deleteById(dataSource.getId()); + } + + @Test + public void testQueryAuthedDatasource() { + + DataSource dataSource = insertOne(); + DatasourceUser datasourceUser = new DatasourceUser(); + datasourceUser.setUserId(3); + datasourceUser.setDatasourceId(dataSource.getId()); + dataSourceUserMapper.insert(datasourceUser); + + List dataSources = dataSourceMapper.queryAuthedDatasource(3); + Assert.assertNotEquals(dataSources.size(), 0); + dataSourceMapper.deleteById(dataSource.getId()); + dataSourceUserMapper.deleteById(datasourceUser.getId()); + } + + @Test + public void testQueryDatasourceExceptUserId() { + DataSource dataSource = insertOne(); + List dataSources = dataSourceMapper.queryDatasourceExceptUserId(3); + Assert.assertNotEquals(dataSources.size(), 0); + dataSourceMapper.deleteById(dataSource.getId()); + } + + @Test + public void testListAllDataSourceByType() { + + DataSource dataSource = insertOne(); + + List dataSources = dataSourceMapper.queryDataSourceByType(4, DbType.MYSQL.ordinal()); + Assert.assertNotEquals(dataSources.size(), 0); + List dataSources2 = 
dataSourceMapper.queryDataSourceByType(10091, DbType.MYSQL.ordinal()); + Assert.assertEquals(dataSources2.size(), 0); + dataSourceMapper.deleteById(dataSource.getId()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapperTest.java new file mode 100644 index 0000000000..42a024c32c --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapperTest.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.dao.entity.DatasourceUser; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class DataSourceUserMapperTest { + + @Autowired + DataSourceUserMapper dataSourceUserMapper; + + + private DatasourceUser insertOne(){ + //insertOne + DatasourceUser dataSourceUser = new DatasourceUser(); + dataSourceUser.setUserId(4); + dataSourceUser.setDatasourceId(1010); + dataSourceUser.setPerm(7); + dataSourceUser.setUpdateTime(new Date()); + dataSourceUser.setCreateTime(new Date()); + return dataSourceUser; + } + + @Test + public void testUpdate(){ + //insertOne + DatasourceUser dataSourceUser = insertOne(); + //update + dataSourceUser.setUpdateTime(new Date()); + int update = dataSourceUserMapper.updateById(dataSourceUser); + Assert.assertEquals(update, 1); + dataSourceUserMapper.deleteById(dataSourceUser.getId()); + } + + @Test + public void testDelete(){ + + DatasourceUser dataSourceUser = insertOne(); + int delete = dataSourceUserMapper.deleteById(dataSourceUser.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + DatasourceUser dataSourceUser = insertOne(); + //query + List dataSources = dataSourceUserMapper.selectList(null); + Assert.assertNotEquals(dataSources.size(), 0); + dataSourceUserMapper.deleteById(dataSourceUser.getId()); + } + + @Test + public void testDeleteByUserId() { + DatasourceUser dataSourceUser = insertOne(); + int delete = dataSourceUserMapper.deleteByUserId(dataSourceUser.getUserId()); + Assert.assertNotEquals(delete, 0); + } + + @Test + public void testDeleteByDatasourceId() { + DatasourceUser 
dataSourceUser = insertOne(); + int delete = dataSourceUserMapper.deleteByDatasourceId(dataSourceUser.getDatasourceId()); + Assert.assertNotEquals(delete, 0); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java new file mode 100644 index 0000000000..477ff2d68f --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.dao.entity.CommandCount; +import org.apache.dolphinscheduler.dao.entity.ErrorCommand; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class ErrorCommandMapperTest { + + @Autowired + ErrorCommandMapper errorCommandMapper; + + @Autowired + ProcessDefinitionMapper processDefinitionMapper; + + + private ErrorCommand insertOne(){ + //insertOne + ErrorCommand errorCommand = new ErrorCommand(); + errorCommand.setId(10101); + errorCommand.setCommandType(CommandType.START_PROCESS); + errorCommand.setUpdateTime(new Date()); + errorCommand.setStartTime(new Date()); + errorCommandMapper.insert(errorCommand); + return errorCommand; + } + + @Test + public void testUpdate(){ + //insertOne + ErrorCommand errorCommand = insertOne(); + //update + errorCommand.setUpdateTime(new Date()); + int update = errorCommandMapper.updateById(errorCommand); + Assert.assertEquals(update, 1); + errorCommandMapper.deleteById(errorCommand.getId()); + } + + @Test + public void testDelete(){ + + ErrorCommand errorCommand = insertOne(); + int delete = errorCommandMapper.deleteById(errorCommand.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + errorCommandMapper.delete(null); + + ErrorCommand errorCommand = insertOne(); + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setName("def 1"); + processDefinition.setProjectId(1010); + processDefinition.setUserId(101); + 
processDefinition.setUpdateTime(new Date()); + processDefinition.setCreateTime(new Date()); + processDefinitionMapper.insert(processDefinition); + + errorCommand.setProcessDefinitionId(processDefinition.getId()); + errorCommandMapper.updateById(errorCommand); + + + List commandCounts = errorCommandMapper.countCommandState( + null, + null, + new Integer[0] + ); + + Integer[] projectIdArray = new Integer[2]; + projectIdArray[0] = processDefinition.getProjectId(); + projectIdArray[1] = 200; + List commandCounts2 = errorCommandMapper.countCommandState( + null, + null, + projectIdArray + ); + + errorCommandMapper.deleteById(errorCommand.getId()); + processDefinitionMapper.deleteById(processDefinition.getId()); + Assert.assertNotEquals(commandCounts.size(), 0); + Assert.assertNotEquals(commandCounts2.size(), 0); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java new file mode 100644 index 0000000000..e7c1e55011 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java @@ -0,0 +1,197 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.common.enums.UserType; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.dolphinscheduler.dao.entity.*; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class ProcessDefinitionMapperTest { + + + @Autowired + ProcessDefinitionMapper processDefinitionMapper; + + @Autowired + UserMapper userMapper; + + @Autowired + QueueMapper queueMapper; + + @Autowired + TenantMapper tenantMapper; + + @Autowired + ProjectMapper projectMapper; + + private ProcessDefinition insertOne(){ + //insertOne + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setName("def 1"); + processDefinition.setProjectId(1010); + processDefinition.setUserId(101); + processDefinition.setUpdateTime(new Date()); + processDefinition.setCreateTime(new Date()); + processDefinitionMapper.insert(processDefinition); + return processDefinition; + } + + @Test + public void testUpdate(){ + //insertOne + ProcessDefinition processDefinition = insertOne(); + //update + processDefinition.setUpdateTime(new Date()); + int update = processDefinitionMapper.updateById(processDefinition); + Assert.assertEquals(update, 1); + processDefinitionMapper.deleteById(processDefinition.getId()); + } + + @Test + public void testDelete(){ + ProcessDefinition processDefinition = insertOne(); + int delete = processDefinitionMapper.deleteById(processDefinition.getId()); + 
Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + ProcessDefinition processDefinition = insertOne(); + //query + List dataSources = processDefinitionMapper.selectList(null); + Assert.assertNotEquals(dataSources.size(), 0); + processDefinitionMapper.deleteById(processDefinition.getId()); + } + + @Test + public void testQueryByDefineName() { + Project project = new Project(); + project.setName("ut project"); + project.setUserId(4); + projectMapper.insert(project); + + Queue queue = new Queue(); + queue.setQueue("queue"); + queue.setQueueName("queue name"); + queueMapper.insert(queue); + + Tenant tenant = new Tenant(); + tenant.setTenantCode("tenant"); + tenant.setQueueId(queue.getId()); + tenant.setDescription("t"); + tenantMapper.insert(tenant); + + User user = new User(); + user.setUserName("hello"); + user.setUserPassword("pwd"); + user.setUserType(UserType.GENERAL_USER); + user.setTenantId(tenant.getId()); + userMapper.insert(user); + + //insertOne + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setName("def 1"); + processDefinition.setProjectId(project.getId()); + processDefinition.setUpdateTime(new Date()); + processDefinition.setCreateTime(new Date()); + processDefinition.setTenantId(tenant.getId()); + processDefinition.setUserId(user.getId()); + processDefinitionMapper.insert(processDefinition); + + ProcessDefinition processDefinition1 = processDefinitionMapper.queryByDefineName(project.getId(), "def 1"); + Assert.assertNotEquals(processDefinition1, null); + processDefinitionMapper.deleteById(processDefinition.getId()); + queueMapper.deleteById(queue.getId()); + projectMapper.deleteById(project.getId()); + tenantMapper.deleteById(tenant.getId()); + userMapper.deleteById(user.getId()); + } + + @Test + public void testQueryDefineListPaging() { + ProcessDefinition processDefinition = insertOne(); + Page page = new Page(1,3); + IPage processDefinitionIPage = 
processDefinitionMapper.queryDefineListPaging(page, "def", 101, 1010); + Assert.assertNotEquals(processDefinitionIPage.getTotal(), 0); + processDefinitionMapper.deleteById(processDefinition.getId()); + } + + @Test + public void testQueryAllDefinitionList() { + ProcessDefinition processDefinition = insertOne(); + List processDefinitionIPage = processDefinitionMapper.queryAllDefinitionList(1010); + Assert.assertNotEquals(processDefinitionIPage.size(), 0); + processDefinitionMapper.deleteById(processDefinition.getId()); + } + + @Test + public void testQueryDefinitionListByIdList() { + + ProcessDefinition processDefinition = insertOne(); + ProcessDefinition processDefinition1 = insertOne(); + + Integer[] array = new Integer[2]; + array[0] = processDefinition.getId(); + array[1] = processDefinition1.getId(); + + List processDefinitions = processDefinitionMapper.queryDefinitionListByIdList(array); + processDefinitionMapper.deleteById(processDefinition.getId()); + processDefinitionMapper.deleteById(processDefinition1.getId()); + Assert.assertEquals(processDefinitions.size(), 2); + + } + + @Test + public void testCountDefinitionGroupByUser() { + + User user= new User(); + user.setUserName("user1"); + user.setUserPassword("1"); + user.setEmail("xx@123.com"); + user.setUserType(UserType.GENERAL_USER); + user.setCreateTime(new Date()); + user.setTenantId(1); + user.setUpdateTime(new Date()); + userMapper.insert(user); + + ProcessDefinition processDefinition = insertOne(); + processDefinition.setUserId(user.getId()); + processDefinitionMapper.updateById(processDefinition); + + Integer[] projectIds = new Integer[1]; + projectIds[0] = processDefinition.getProjectId(); + List processDefinitions = processDefinitionMapper.countDefinitionGroupByUser( + processDefinition.getUserId(), + projectIds + ); + processDefinitionMapper.deleteById(processDefinition.getId()); + Assert.assertNotEquals(processDefinitions.size(), 0); + } +} \ No newline at end of file diff --git 
a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapperTest.java new file mode 100644 index 0000000000..40ff56c562 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapperTest.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class ProcessInstanceMapMapperTest { + + + @Autowired + ProcessInstanceMapMapper processInstanceMapMapper; + + + private ProcessInstanceMap insertOne(){ + //insertOne + ProcessInstanceMap processInstanceMap = new ProcessInstanceMap(); + processInstanceMap.setProcessInstanceId(0); + processInstanceMap.setParentTaskInstanceId(0); + processInstanceMap.setParentProcessInstanceId(0); + processInstanceMapMapper.insert(processInstanceMap); + return processInstanceMap; + } + + @Test + public void testUpdate(){ + //insertOne + ProcessInstanceMap processInstanceMap = insertOne(); + //update + processInstanceMap.setParentProcessInstanceId(1); + int update = processInstanceMapMapper.updateById(processInstanceMap); + Assert.assertEquals(update, 1); + processInstanceMapMapper.deleteById(processInstanceMap.getId()); + } + + @Test + public void testDelete(){ + ProcessInstanceMap processInstanceMap = insertOne(); + int delete = processInstanceMapMapper.deleteById(processInstanceMap.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + ProcessInstanceMap processInstanceMap = insertOne(); + //query + List dataSources = processInstanceMapMapper.selectList(null); + Assert.assertNotEquals(dataSources.size(), 0); + processInstanceMapMapper.deleteById(processInstanceMap.getId()); + } + + @Test + public void testQueryByParentId() { + ProcessInstanceMap processInstanceMap = insertOne(); + + processInstanceMap.setParentProcessInstanceId(100); + 
processInstanceMapMapper.updateById(processInstanceMap); + ProcessInstanceMap map = + processInstanceMapMapper.queryByParentId(processInstanceMap.getParentProcessInstanceId(), processInstanceMap.getParentTaskInstanceId()); + Assert.assertNotEquals(map, null); + + + processInstanceMapMapper.deleteById(processInstanceMap.getId()); + } + + @Test + public void testQueryBySubProcessId() { + ProcessInstanceMap processInstanceMap = insertOne(); + + processInstanceMap.setProcessInstanceId(100); + processInstanceMapMapper.updateById(processInstanceMap); + ProcessInstanceMap map = + processInstanceMapMapper.queryBySubProcessId( + processInstanceMap.getProcessInstanceId() ); + Assert.assertNotEquals(map, null); + + processInstanceMapMapper.deleteById(processInstanceMap.getId()); + } + + @Test + public void testDeleteByParentProcessId() { + ProcessInstanceMap processInstanceMap = insertOne(); + + processInstanceMap.setParentProcessInstanceId(100); + processInstanceMapMapper.updateById(processInstanceMap); + int delete = processInstanceMapMapper.deleteByParentProcessId( + processInstanceMap.getParentProcessInstanceId() + ); + Assert.assertEquals(delete, 1); + } + + @Test + public void querySubIdListByParentId() { + ProcessInstanceMap processInstanceMap = insertOne(); + processInstanceMap.setProcessInstanceId(1); + processInstanceMap.setParentProcessInstanceId(1010); + + processInstanceMapMapper.updateById(processInstanceMap); + + List subIds = processInstanceMapMapper.querySubIdListByParentId(processInstanceMap.getParentProcessInstanceId()); + + Assert.assertNotEquals(subIds.size(), 0); + + processInstanceMapMapper.deleteById(processInstanceMap.getId()); + + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java new file mode 100644 index 0000000000..626a774961 --- /dev/null +++ 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java @@ -0,0 +1,289 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class ProcessInstanceMapperTest { + + + @Autowired + 
ProcessInstanceMapper processInstanceMapper; + + @Autowired + ProcessDefinitionMapper processDefinitionMapper; + + @Autowired + ProjectMapper projectMapper; + + + private ProcessInstance insertOne(){ + //insertOne + ProcessInstance processInstance = new ProcessInstance(); + Date start = new Date(2019-1900, 1-1, 1, 0, 10,0); + Date end = new Date(2019-1900, 1-1, 1, 1, 0,0); + processInstance.setStartTime(start); + processInstance.setEndTime(end); + processInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); + + processInstanceMapper.insert(processInstance); + return processInstance; + } + + @Test + public void testUpdate(){ + //insertOne + ProcessInstance processInstanceMap = insertOne(); + //update + int update = processInstanceMapper.updateById(processInstanceMap); + Assert.assertEquals(update, 1); + processInstanceMapper.deleteById(processInstanceMap.getId()); + } + + @Test + public void testDelete(){ + ProcessInstance processInstanceMap = insertOne(); + int delete = processInstanceMapper.deleteById(processInstanceMap.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + ProcessInstance processInstance = insertOne(); + //query + List dataSources = processInstanceMapper.selectList(null); + Assert.assertNotEquals(dataSources.size(), 0); + processInstanceMapper.deleteById(processInstance.getId()); + } + + @Test + public void testQueryDetailById() { + ProcessInstance processInstance = insertOne(); + processInstanceMapper.updateById(processInstance); + + ProcessInstance processInstance1 = processInstanceMapper.queryDetailById(processInstance.getId()); + Assert.assertNotEquals(processInstance1, 50); + processInstanceMapper.deleteById(processInstance.getId()); + } + + @Test + public void testQueryByHostAndStatus() { + ProcessInstance processInstance = insertOne(); + processInstance.setHost("192.168.2.155"); + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + processInstanceMapper.updateById(processInstance); + + int[] 
stateArray = new int[]{ + ExecutionStatus.RUNNING_EXEUTION.ordinal(), + ExecutionStatus.SUCCESS.ordinal()}; + + processInstanceMapper.queryByHostAndStatus(processInstance.getHost(), stateArray); + + processInstanceMapper.deleteById(processInstance.getId()); + } + + @Test + public void testQueryProcessInstanceListPaging() { + + + int[] stateArray = new int[]{ + ExecutionStatus.RUNNING_EXEUTION.ordinal(), + ExecutionStatus.SUCCESS.ordinal()}; + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setProjectId(1010); + processDefinition.setReleaseState(ReleaseState.ONLINE); + processDefinitionMapper.insert(processDefinition); + + ProcessInstance processInstance = insertOne(); + processInstance.setProcessDefinitionId(processDefinition.getId()); + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + processInstance.setIsSubProcess(Flag.NO); + processInstance.setStartTime(new Date()); + + processInstanceMapper.updateById(processInstance); + + + Page page = new Page(1, 3); + + IPage processInstanceIPage = processInstanceMapper.queryProcessInstanceListPaging( + page, + processDefinition.getProjectId(), + processInstance.getProcessDefinitionId(), + processInstance.getName(), + stateArray, + processInstance.getHost(), + null, + null + ); + Assert.assertNotEquals(processInstanceIPage.getTotal(), 0); + + processDefinitionMapper.deleteById(processDefinition.getId()); + processInstanceMapper.deleteById(processInstance.getId()); + } + + @Test + public void testSetFailoverByHostAndStateArray() { + + int[] stateArray = new int[]{ + ExecutionStatus.RUNNING_EXEUTION.ordinal(), + ExecutionStatus.SUCCESS.ordinal()}; + + ProcessInstance processInstance = insertOne(); + + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + processInstance.setHost("192.168.2.220"); + processInstanceMapper.updateById(processInstance); + String host = processInstance.getHost(); + int update = processInstanceMapper.setFailoverByHostAndStateArray(host, 
stateArray); + Assert.assertNotEquals(update, 0); + + processInstance = processInstanceMapper.selectById(processInstance.getId()); + Assert.assertEquals(processInstance.getHost(), null); + processInstanceMapper.deleteById(processInstance.getId()); + } + + @Test + public void testUpdateProcessInstanceByState() { + + + ProcessInstance processInstance = insertOne(); + + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + processInstanceMapper.updateById(processInstance); + processInstanceMapper.updateProcessInstanceByState(ExecutionStatus.RUNNING_EXEUTION, ExecutionStatus.SUCCESS); + + ProcessInstance processInstance1 = processInstanceMapper.selectById(processInstance.getId()); + + processInstanceMapper.deleteById(processInstance.getId()); + Assert.assertEquals(processInstance1.getState(), ExecutionStatus.SUCCESS); + + } + + @Test + public void testCountInstanceStateByUser() { + + processDefinitionMapper.delete(null); + processInstanceMapper.delete(null); + + Project project = new Project(); + project.setName("testProject"); + projectMapper.insert(project); + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setProjectId(project.getId()); + + processDefinitionMapper.insert(processDefinition); + ProcessInstance processInstance = insertOne(); + processInstance.setProcessDefinitionId(processDefinition.getId()); + int update = processInstanceMapper.updateById(processInstance); + + Integer[] projectIds = new Integer[]{processDefinition.getProjectId()}; + + List executeStatusCounts = processInstanceMapper.countInstanceStateByUser(null, null, projectIds); + + + Assert.assertNotEquals(executeStatusCounts.size(), 0); + + projectMapper.deleteById(project.getId()); + processDefinitionMapper.deleteById(processDefinition.getId()); + processInstanceMapper.deleteById(processInstance.getId()); + } + + @Test + public void testQueryByProcessDefineId() { + ProcessInstance processInstance = insertOne(); + ProcessInstance processInstance1 = 
insertOne(); + + + List processInstances = processInstanceMapper.queryByProcessDefineId(processInstance.getProcessDefinitionId(), 1); + Assert.assertEquals(processInstances.size(), 1); + + processInstances = processInstanceMapper.queryByProcessDefineId(processInstance.getProcessDefinitionId(), 2); + Assert.assertEquals(processInstances.size(), 2); + + processInstanceMapper.deleteById(processInstance.getId()); + processInstanceMapper.deleteById(processInstance1.getId()); + } + + @Test + public void testQueryLastSchedulerProcess() { + ProcessInstance processInstance = insertOne(); + processInstance.setScheduleTime(new Date()); + processInstanceMapper.updateById(processInstance); + + ProcessInstance processInstance1 = processInstanceMapper.queryLastSchedulerProcess(processInstance.getProcessDefinitionId(), null, null ); + Assert.assertNotEquals(processInstance1, null); + processInstanceMapper.deleteById(processInstance.getId()); + } + + @Test + public void testQueryLastRunningProcess() { + ProcessInstance processInstance = insertOne(); + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + processInstanceMapper.updateById(processInstance); + + int[] stateArray = new int[]{ + ExecutionStatus.RUNNING_EXEUTION.ordinal(), + ExecutionStatus.SUBMITTED_SUCCESS.ordinal()}; + + ProcessInstance processInstance1 = processInstanceMapper.queryLastRunningProcess(processInstance.getProcessDefinitionId(), null, null , stateArray); + + Assert.assertNotEquals(processInstance1, null); + processInstanceMapper.deleteById(processInstance.getId()); + } + + @Test + public void testQueryLastManualProcess() { + ProcessInstance processInstance = insertOne(); + processInstanceMapper.updateById(processInstance); + + Date start = new Date(2019-1900, 1-1, 01, 0, 0, 0); + Date end = new Date(2019-1900, 1-1, 01, 5, 0, 0); + ProcessInstance processInstance1 = processInstanceMapper.queryLastManualProcess(processInstance.getProcessDefinitionId(),start, end + ); + 
Assert.assertEquals(processInstance1.getId(), processInstance.getId()); + + start = new Date(2019-1900, 1-1, 01, 1, 0, 0); + processInstance1 = processInstanceMapper.queryLastManualProcess(processInstance.getProcessDefinitionId(),start, end + ); + Assert.assertEquals(processInstance1, null); + + processInstanceMapper.deleteById(processInstance.getId()); + + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapperTest.java new file mode 100644 index 0000000000..00ef3aa8c8 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapperTest.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Test skeleton for ProjectMapper.
 *
 * Every method below is an empty placeholder: the names enumerate the mapper
 * queries that are intended to be covered, but no test bodies have been
 * written yet. The class still boots a Spring context via @SpringBootTest,
 * so running it at least verifies that the application context loads.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class ProjectMapperTest {

    // TODO: unimplemented stub — exercise ProjectMapper.queryDetailById
    @Test
    public void testQueryDetailById() {
    }

    // TODO: unimplemented stub — exercise ProjectMapper.queryProjectByName
    @Test
    public void testQueryProjectByName() {
    }

    // TODO: unimplemented stub — exercise ProjectMapper.queryProjectListPaging
    @Test
    public void testQueryProjectListPaging() {
    }

    // TODO: unimplemented stub — exercise ProjectMapper.queryAllProjectListPaging
    @Test
    public void testQueryAllProjectListPaging() {
    }

    // TODO: unimplemented stub — exercise ProjectMapper.queryProjectCreatedByUser
    @Test
    public void testQueryProjectCreatedByUser() {
    }

    // TODO: unimplemented stub — exercise ProjectMapper.queryAuthedProjectListByUserId
    @Test
    public void testQueryAuthedProjectListByUserId() {
    }

    // TODO: unimplemented stub — exercise ProjectMapper.queryProjectExceptUserId
    @Test
    public void testQueryProjectExceptUserId() {
    }
}
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.dao.entity.ProjectUser; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class ProjectUserMapperTest { + + + @Autowired + ProjectUserMapper projectUserMapper; + + private ProjectUser insertOne(){ + //insertOne + ProjectUser projectUser = new ProjectUser(); + projectUser.setProjectId(1010); + projectUser.setUserId(111); + projectUserMapper.insert(projectUser); + return projectUser; + } + + @Test + public void testUpdate(){ + //insertOne + ProjectUser projectUser = insertOne(); + projectUser.setCreateTime(new Date()); + //update + int update = projectUserMapper.updateById(projectUser); + Assert.assertEquals(update, 1); + projectUserMapper.deleteById(projectUser.getId()); + } + + @Test + public void testDelete(){ + ProjectUser projectUserMap = insertOne(); + int delete = projectUserMapper.deleteById(projectUserMap.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + ProjectUser projectUser = insertOne(); + //query + List projectUsers = projectUserMapper.selectList(null); + Assert.assertNotEquals(projectUsers.size(), 0); + projectUserMapper.deleteById(projectUser.getId()); + } + + @Test + public void testDeleteProjectRelation() { + + + ProjectUser projectUser = insertOne(); + int delete = projectUserMapper.deleteProjectRelation(projectUser.getProjectId(), projectUser.getUserId()); + Assert.assertEquals(delete, 1); + + } + + @Test + public void testQueryProjectRelation() { + ProjectUser projectUser = insertOne(); + ProjectUser projectUser1 = projectUserMapper.queryProjectRelation(projectUser.getProjectId(), 
projectUser.getUserId()); + Assert.assertNotEquals(projectUser1, null); + + projectUserMapper.deleteById(projectUser.getId()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/QueueMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/QueueMapperTest.java new file mode 100644 index 0000000000..1539d4ce96 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/QueueMapperTest.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.dao.entity.Queue; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class QueueMapperTest { + + + @Autowired + QueueMapper queueMapper; + + + private Queue insertOne(){ + //insertOne + Queue queue = new Queue(); + queue.setQueueName("queue"); + queue.setQueue("queue"); + queue.setCreateTime(new Date()); + queue.setUpdateTime(new Date()); + queueMapper.insert(queue); + return queue; + } + + @Test + public void testUpdate(){ + //insertOne + Queue queue = insertOne(); + queue.setCreateTime(new Date()); + //update + int update = queueMapper.updateById(queue); + Assert.assertEquals(update, 1); + queueMapper.deleteById(queue.getId()); + } + + @Test + public void testDelete(){ + Queue queue = insertOne(); + int delete = queueMapper.deleteById(queue.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + Queue queue = insertOne(); + //query + List queues = queueMapper.selectList(null); + Assert.assertNotEquals(queues.size(), 0); + queueMapper.deleteById(queue.getId()); + } + + @Test + public void testQueryQueuePaging() { + + Queue queue = insertOne(); + Page page = new Page(1,3); + + IPage queueIPage= queueMapper.queryQueuePaging(page, + null); + Assert.assertNotEquals(queueIPage.getTotal(), 0); + + queueIPage= queueMapper.queryQueuePaging(page, + queue.getQueueName()); + Assert.assertNotEquals(queueIPage.getTotal(), 0); + queueMapper.deleteById(queue.getId()); + } + + @Test + public void 
queryAllQueueList() { + Queue queue = insertOne(); + + List queues = queueMapper.queryAllQueueList(queue.getQueue(), null); + Assert.assertNotEquals(queues.size(), 0); + + queues = queueMapper.queryAllQueueList(null, queue.getQueueName()); + Assert.assertNotEquals(queues.size(), 0); + queueMapper.deleteById(queue.getId()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java new file mode 100644 index 0000000000..278195fe46 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Test skeleton for ResourceMapper.
 *
 * Every method below is an empty placeholder: the names enumerate the mapper
 * queries that are intended to be covered, but no test bodies have been
 * written yet.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class ResourceMapperTest {

    // TODO: unimplemented stub — exercise ResourceMapper.queryResourceList
    @Test
    public void testQueryResourceList() {
    }

    // TODO: unimplemented stub — exercise ResourceMapper.queryResourcePaging
    @Test
    public void testQueryResourcePaging() {
    }

    // TODO: unimplemented stub — exercise ResourceMapper.queryResourceListAuthored
    @Test
    public void testQueryResourceListAuthored() {
    }

    // TODO: unimplemented stub — exercise ResourceMapper.queryAuthorizedResourceList
    @Test
    public void testQueryAuthorizedResourceList() {
    }

    // TODO: unimplemented stub — exercise ResourceMapper.queryResourceExceptUserId
    @Test
    public void testQueryResourceExceptUserId() {
    }

    // TODO: unimplemented stub — exercise ResourceMapper.queryTenantCodeByResourceName
    @Test
    public void testQueryTenantCodeByResourceName() {
    }
}
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.dao.entity.ResourcesUser; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class ResourceUserMapperTest { + + + + @Autowired + ResourceUserMapper resourceUserMapper; + + private ResourcesUser insertOne(){ + //insertOne + ResourcesUser queue = new ResourcesUser(); + queue.setCreateTime(new Date()); + queue.setUpdateTime(new Date()); + queue.setUserId(11111); + queue.setResourcesId(1110); + resourceUserMapper.insert(queue); + return queue; + } + + @Test + public void testUpdate(){ + //insertOne + ResourcesUser queue = insertOne(); + queue.setCreateTime(new Date()); + //update + int update = resourceUserMapper.updateById(queue); + Assert.assertEquals(update, 1); + resourceUserMapper.deleteById(queue.getId()); + } + + @Test + public void testDelete(){ + ResourcesUser queue = insertOne(); + int delete = resourceUserMapper.deleteById(queue.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + ResourcesUser queue = insertOne(); + //query + List queues = resourceUserMapper.selectList(null); + Assert.assertNotEquals(queues.size(), 0); + resourceUserMapper.deleteById(queue.getId()); + } + + @Test + public void testDeleteResourceUser() { + + ResourcesUser queue = insertOne(); + int delete = resourceUserMapper.deleteResourceUser( + queue.getUserId(), + queue.getResourcesId()); + Assert.assertNotEquals(delete, 0); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java new file mode 100644 index 0000000000..b3ce4c4fce --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java @@ -0,0 +1,193 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.WarningType; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.User; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class ScheduleMapperTest { + + + @Autowired + ScheduleMapper scheduleMapper; + + @Autowired + UserMapper userMapper; + + @Autowired + ProjectMapper projectMapper; + + @Autowired + ProcessDefinitionMapper processDefinitionMapper; + + private Schedule insertOne(){ + //insertOne + Schedule schedule = new Schedule(); + schedule.setStartTime(new Date()); + schedule.setEndTime(new Date()); + schedule.setCrontab(""); + schedule.setFailureStrategy(FailureStrategy.CONTINUE); + schedule.setReleaseState(ReleaseState.OFFLINE); + schedule.setWarningType(WarningType.NONE); + schedule.setCreateTime(new Date()); + schedule.setUpdateTime(new Date()); + scheduleMapper.insert(schedule); + return schedule; + } + + @Test + public void testUpdate(){ + //insertOne + Schedule schedule = insertOne(); + schedule.setCreateTime(new Date()); + //update + int update = scheduleMapper.updateById(schedule); + Assert.assertEquals(update, 1); + scheduleMapper.deleteById(schedule.getId()); + } + + @Test + public void testDelete(){ + 
Schedule schedule = insertOne(); + int delete = scheduleMapper.deleteById(schedule.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + Schedule schedule = insertOne(); + //query + List schedules = scheduleMapper.selectList(null); + Assert.assertNotEquals(schedules.size(), 0); + scheduleMapper.deleteById(schedule.getId()); + } + + @Test + public void testQueryByProcessDefineIdPaging() { + + User user = new User(); + user.setUserName("ut name"); + userMapper.insert(user); + + Project project = new Project(); + project.setName("ut project"); + project.setUserId(user.getId()); + projectMapper.insert(project); + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setProjectId(project.getId()); + processDefinition.setUserId(user.getId()); + processDefinition.setLocations(""); + processDefinitionMapper.insert(processDefinition); + + Schedule schedule= insertOne(); + schedule.setUserId(user.getId()); + schedule.setProcessDefinitionId(processDefinition.getId()); + scheduleMapper.insert(schedule); + + Page page = new Page(1,3); + IPage scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging(page, + processDefinition.getId(), "" + ); + Assert.assertNotEquals(scheduleIPage.getSize(), 0); + + + projectMapper.deleteById(project.getId()); + processDefinitionMapper.deleteById(processDefinition.getId()); + userMapper.deleteById(user.getId()); + scheduleMapper.deleteById(schedule.getId()); + } + + @Test + public void testQuerySchedulerListByProjectName() { + + + User user = new User(); + user.setUserName("ut name"); + userMapper.insert(user); + + Project project = new Project(); + project.setName("ut project"); + project.setUserId(user.getId()); + projectMapper.insert(project); + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setProjectId(project.getId()); + processDefinition.setUserId(user.getId()); + processDefinition.setLocations(""); + 
processDefinitionMapper.insert(processDefinition); + + Schedule schedule= insertOne(); + schedule.setUserId(user.getId()); + schedule.setProcessDefinitionId(processDefinition.getId()); + scheduleMapper.insert(schedule); + + Page page = new Page(1,3); + List schedules = scheduleMapper.querySchedulerListByProjectName( + project.getName() + ); + projectMapper.deleteById(project.getId()); + processDefinitionMapper.deleteById(processDefinition.getId()); + userMapper.deleteById(user.getId()); + scheduleMapper.deleteById(schedule.getId()); + + Assert.assertNotEquals(schedules.size(), 0); + } + + @Test + public void testSelectAllByProcessDefineArray() { + + Schedule schedule = insertOne(); + schedule.setProcessDefinitionId(12345); + schedule.setReleaseState(ReleaseState.ONLINE); + scheduleMapper.updateById(schedule); + + List schedules= scheduleMapper.selectAllByProcessDefineArray(new int[] {schedule.getProcessDefinitionId()}); + scheduleMapper.deleteById(schedule.getId()); + Assert.assertNotEquals(schedules.size(), 0); + } + + @Test + public void queryByProcessDefinitionId() { + Schedule schedule = insertOne(); + schedule.setProcessDefinitionId(12345); + scheduleMapper.updateById(schedule); + + List schedules= scheduleMapper.queryByProcessDefinitionId(schedule.getProcessDefinitionId()); + scheduleMapper.deleteById(schedule.getId()); + Assert.assertNotEquals(schedules.size(), 0); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/SessionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/SessionMapperTest.java new file mode 100644 index 0000000000..ca16ee07d4 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/SessionMapperTest.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.dao.entity.Session; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class SessionMapperTest { + + @Autowired + SessionMapper sessionMapper; + + private Session insertOne(){ + //insertOne + Session session = new Session(); + session.setLastLoginTime(new Date()); + session.setUserId(11111); + sessionMapper.insert(session); + return session; + } + + @Test + public void testUpdate(){ + //insertOne + Session session = insertOne(); + session.setLastLoginTime(new Date()); + //update + int update = sessionMapper.updateById(session); + Assert.assertEquals(update, 1); + sessionMapper.deleteById(session.getId()); + } + + @Test + public void testDelete(){ + Session session = insertOne(); + int delete = sessionMapper.deleteById(session.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + Session session = insertOne(); + //query + List sessions 
= sessionMapper.selectList(null); + Assert.assertNotEquals(sessions.size(), 0); + sessionMapper.deleteById(session.getId()); + } + + @Test + public void testQueryByUserId() { + Session session = insertOne(); + List sessions = sessionMapper.queryByUserId(session.getUserId()); + Assert.assertNotEquals(sessions.size(), 0); + + sessionMapper.deleteById(session.getId()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java new file mode 100644 index 0000000000..591a5b2e9b --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Test skeleton for TaskInstanceMapper.
 *
 * Every method below is an empty placeholder: the names enumerate the mapper
 * queries that are intended to be covered, but no test bodies have been
 * written yet.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class TaskInstanceMapperTest {

    // TODO: unimplemented stub — exercise TaskInstanceMapper.queryTaskByProcessIdAndState
    @Test
    public void testQueryTaskByProcessIdAndState() {
    }

    // TODO: unimplemented stub — exercise TaskInstanceMapper.queryById
    @Test
    public void testQueryById() {
    }

    // TODO: unimplemented stub — exercise TaskInstanceMapper.findValidTaskListByProcessId
    @Test
    public void testFindValidTaskListByProcessId() {
    }

    // TODO: unimplemented stub — exercise TaskInstanceMapper.queryByHostAndStatus
    @Test
    public void testQueryByHostAndStatus() {
    }

    // TODO: unimplemented stub — exercise TaskInstanceMapper.setFailoverByHostAndStateArray
    @Test
    public void testSetFailoverByHostAndStateArray() {
    }

    // TODO: unimplemented stub — exercise TaskInstanceMapper.queryByInstanceIdAndName
    @Test
    public void testQueryByInstanceIdAndName() {
    }

    // TODO: unimplemented stub — exercise TaskInstanceMapper.countTask
    @Test
    public void testCountTask() {
    }

    // TODO: unimplemented stub — exercise TaskInstanceMapper.countTaskInstanceStateByUser
    @Test
    public void testCountTaskInstanceStateByUser() {
    }

    // TODO: unimplemented stub — exercise TaskInstanceMapper.queryTaskInstanceListPaging
    @Test
    public void testQueryTaskInstanceListPaging() {
    }
}
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.dao.entity.Queue; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class TenantMapperTest { + + @Autowired + TenantMapper tenantMapper; + + @Autowired + QueueMapper queueMapper; + + private Tenant insertOne(){ + //insertOne + Tenant tenant = new Tenant(); + tenant.setCreateTime(new Date()); + tenant.setUpdateTime(new Date()); + tenantMapper.insert(tenant); + return tenant; + } + + @Test + public void testUpdate(){ + //insertOne + Tenant tenant = insertOne(); + tenant.setUpdateTime(new Date()); + //update + int update = tenantMapper.updateById(tenant); + Assert.assertEquals(update, 1); + tenantMapper.deleteById(tenant.getId()); + } + + @Test + public void testDelete(){ + Tenant tenant = insertOne(); + int delete = tenantMapper.deleteById(tenant.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + Tenant tenant = insertOne(); + //query + List tenants = tenantMapper.selectList(null); + Assert.assertNotEquals(tenants.size(), 0); + tenantMapper.deleteById(tenant.getId()); + } + + @Test + public void testQueryById() { + + Queue queue = new Queue(); + queue.setQueueName("ut queue name"); + queue.setQueue("ut queue"); + queueMapper.insert(queue); + + + Tenant tenant = insertOne(); + tenant.setQueueId(queue.getId()); + 
tenantMapper.updateById(tenant); + + Tenant tenant1 = tenantMapper.queryById(tenant.getId()); + + tenantMapper.deleteById(tenant.getId()); + Assert.assertNotEquals(tenant1, null); + } + + @Test + public void testQueryByTenantCode() { + + Tenant tenant = insertOne(); + tenant.setTenantCode("ut code"); + tenantMapper.updateById(tenant); + + List tenant1 = tenantMapper.queryByTenantCode(tenant.getTenantCode()); + + tenantMapper.deleteById(tenant.getId()); + Assert.assertNotEquals(tenant1.size(), 0); + } + + @Test + public void testQueryTenantPaging() { + Tenant tenant = insertOne(); + tenant.setTenantCode("ut code"); + tenant.setTenantName("ut name"); + tenantMapper.updateById(tenant); + Page page = new Page(1,3); + + IPage tenantIPage = tenantMapper.queryTenantPaging(page, tenant.getTenantName()); + + tenantMapper.deleteById(tenant.getId()); + Assert.assertNotEquals(tenantIPage.getTotal(), 0); + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java new file mode 100644 index 0000000000..3e9671c10d --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/**
 * Test skeleton for UDFUserMapper.
 *
 * Both methods below are empty placeholders: the names enumerate the mapper
 * operations that are intended to be covered, but no test bodies have been
 * written yet.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class UDFUserMapperTest {

    // TODO: unimplemented stub — exercise UDFUserMapper.deleteByUserId
    @Test
    public void testDeleteByUserId() {
    }

    // TODO: unimplemented stub — exercise UDFUserMapper.deleteByUdfFuncId
    @Test
    public void testDeleteByUdfFuncId() {
    }
}
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class UdfFuncMapperTest { + + @Test + public void testQueryUdfByIdStr() { + } + + @Test + public void testQueryUdfFuncPaging() { + } + + @Test + public void testGetUdfFuncByType() { + } + + @Test + public void testQueryUdfFuncExceptUserId() { + } + + @Test + public void testQueryAuthedUdfFunc() { + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapperTest.java new file mode 100644 index 0000000000..3e86eab0e2 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapperTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class UserAlertGroupMapperTest { + + @Test + public void testQueryForUser() { + } + + @Test + public void testDeleteByAlertgroupId() { + } + + @Test + public void testListUserByAlertgroupId() { + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java new file mode 100644 index 0000000000..d344308579 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java @@ -0,0 +1,326 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.dolphinscheduler.dao.entity.*; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class UserMapperTest { + @Autowired + private UserMapper userMapper; + + @Autowired + AlertGroupMapper alertGroupMapper; + + @Autowired + private UserAlertGroupMapper userAlertGroupMapper; + + @Autowired + AccessTokenMapper accessTokenMapper; + + @Autowired + TenantMapper tenantMapper; + + @Autowired + QueueMapper queueMapper; + + /** + * insert one user + * @return + */ + private User insertOne(){ + User user = new User(); + user.setUserName("user1"); + user.setUserPassword("1"); + user.setEmail("xx@123.com"); + user.setUserType(UserType.GENERAL_USER); + user.setCreateTime(new Date()); + user.setTenantId(1); + user.setUpdateTime(new Date()); + userMapper.insert(user); + return user; + } + + /** + * insert one user + * @param tenant + * @return + */ + private User insertOne(Tenant tenant){ + User user = new User(); + user.setUserName("user1"); + user.setUserPassword("1"); + user.setEmail("xx@123.com"); + user.setUserType(UserType.GENERAL_USER); + user.setCreateTime(new Date()); + user.setTenantId(tenant.getId()); + user.setUpdateTime(new Date()); + userMapper.insert(user); + return user; + } + + /** + * insert one user + * @param queue + * @param tenant + * @return + */ + private User 
insertOne(Queue queue,Tenant tenant){ + User user = new User(); + user.setUserName("user1"); + user.setUserPassword("1"); + user.setEmail("xx@123.com"); + user.setUserType(UserType.GENERAL_USER); + user.setCreateTime(new Date()); + user.setTenantId(tenant.getId()); + user.setQueue(queue.getQueueName()); + user.setUpdateTime(new Date()); + userMapper.insert(user); + return user; + } + + /** + * insert one AlertGroup + * @return + */ + private AlertGroup insertOneAlertGroup(){ + //insertOne + AlertGroup alertGroup = new AlertGroup(); + alertGroup.setGroupName("alert group 1"); + alertGroup.setDescription("alert test1"); + alertGroup.setGroupType(AlertType.EMAIL); + + alertGroup.setCreateTime(new Date()); + alertGroup.setUpdateTime(new Date()); + alertGroupMapper.insert(alertGroup); + return alertGroup; + } + + /** + * insert one UserAlertGroup + * @param user + * @param alertGroup + * @return + */ + private UserAlertGroup insertOneUserAlertGroup(User user,AlertGroup alertGroup){ + UserAlertGroup userAlertGroup = new UserAlertGroup(); + userAlertGroup.setAlertgroupName(alertGroup.getGroupName()); + userAlertGroup.setAlertgroupId(alertGroup.getId()); + userAlertGroup.setUserId(user.getId()); + userAlertGroup.setCreateTime(new Date()); + userAlertGroup.setUpdateTime(new Date()); + userAlertGroupMapper.insert(userAlertGroup); + return userAlertGroup; + } + + /** + * insert one AccessToken + * @param user + * @return + */ + private AccessToken insertOneAccessToken(User user){ + //insertOne + AccessToken accessToken = new AccessToken(); + accessToken.setUserId(user.getId()); + accessToken.setToken("secrettoken"); + accessToken.setCreateTime(new Date()); + accessToken.setUpdateTime(new Date()); + accessToken.setExpireTime(DateUtils.getSomeHourOfDay(new Date(),-1)); + accessTokenMapper.insert(accessToken); + return accessToken; + } + + /** + * insert one Tenant + * @return + */ + private Tenant insertOneTenant(){ + Tenant tenant = new Tenant(); + 
tenant.setTenantCode("dolphin"); + tenant.setTenantName("dolphin test"); + tenant.setDescription("dolphin user use"); + tenant.setQueue("1"); + tenant.setCreateTime(new Date()); + tenant.setUpdateTime(new Date()); + tenantMapper.insert(tenant); + return tenant; + } + + /** + * insert one Queue + * @return + */ + private Queue insertOneQueue(){ + Queue queue = new Queue(); + queue.setQueue("dolphin"); + queue.setQueueName("dolphin queue"); + queue.setCreateTime(new Date()); + queue.setUpdateTime(new Date()); + queueMapper.insert(queue); + return queue; + } + + @Test + public void testUpdate(){ + //insertOne + User user = insertOne(); + //update + user.setEmail("xx-update@126.com"); + user.setUserName("user1_update"); + user.setUserType(UserType.ADMIN_USER); + int update = userMapper.updateById(user); + Assert.assertEquals(update, 1); + userMapper.deleteById(user.getId()); + } + + @Test + public void testDelete(){ + //insertOne + User user = insertOne(); + //delete + int delete = userMapper.deleteById(user.getId()); + Assert.assertEquals(delete, 1); + userMapper.deleteById(user.getId()); + } + + @Test + public void testQuery() { + //insertOne + User user = insertOne(); + //query + List userList = userMapper.selectList(null); + Assert.assertNotEquals(userList.size(), 0); + userMapper.deleteById(user.getId()); + } + + @Test + public void testQueryAllGeneralUser() { + //insertOne + User user = insertOne(); + //queryAllGeneralUser + List userList = userMapper.queryAllGeneralUser(); + Assert.assertNotEquals(userList.size(), 0); + userMapper.deleteById(user.getId()); + } + + @Test + public void testQueryByUserNameAccurately() { + //insertOne + User user = insertOne(); + //queryByUserNameAccurately + User queryUser = userMapper.queryByUserNameAccurately(user.getUserName()); + Assert.assertEquals(queryUser.getUserName(), user.getUserName()); + userMapper.deleteById(user.getId()); + } + + @Test + public void testQueryUserByNamePassword() { + //insertOne + User user = 
insertOne(); + //queryUserByNamePassword + User queryUser = userMapper.queryUserByNamePassword(user.getUserName(),user.getUserPassword()); + Assert.assertEquals(queryUser.getUserName(),user.getUserName()); + Assert.assertEquals(queryUser.getUserPassword(),user.getUserPassword()); + userMapper.deleteById(user.getId()); + } + + @Test + public void testQueryUserPaging() { + //insertOneQueue + Queue queue = insertOneQueue(); + //insertOneTenant + Tenant tenant = insertOneTenant(); + //insertOne + User user = insertOne(queue,tenant); + //queryUserPaging + Page page = new Page(1,3); + IPage userIPage = userMapper.queryUserPaging(page, user.getUserName()); + Assert.assertNotEquals(userIPage.getTotal(), 0); + queueMapper.deleteById(queue.getId()); + tenantMapper.deleteById(tenant.getId()); + userMapper.deleteById(user.getId()); + } + + @Test + public void testQueryDetailsById() { + //insertOne + User user = insertOne(); + //queryDetailsById + User queryUser = userMapper.queryDetailsById(user.getId()); + Assert.assertEquals(queryUser,user); + userMapper.deleteById(user.getId()); + } + + @Test + public void testQueryUserListByAlertGroupId() { + //insertOne + User user = insertOne(); + //insertOneAlertGroup + AlertGroup alertGroup = insertOneAlertGroup(); + //insertOneUserAlertGroup + UserAlertGroup userAlertGroup = insertOneUserAlertGroup(user, alertGroup); + //queryUserListByAlertGroupId + List userList = userMapper.queryUserListByAlertGroupId(userAlertGroup.getAlertgroupId()); + Assert.assertNotEquals(userList.size(), 0); + userMapper.deleteById(user.getId()); + alertGroupMapper.deleteById(alertGroup.getId()); + userAlertGroupMapper.deleteById(userAlertGroup.getAlertgroupId()); + + } + + @Test + public void testQueryTenantCodeByUserId() { + //insertOneTenant + Tenant tenant = insertOneTenant(); + //insertOne + User user = insertOne(tenant); + //queryTenantCodeByUserId + User queryUser = userMapper.queryTenantCodeByUserId(user.getId()); + 
Assert.assertEquals(queryUser,user); + userMapper.deleteById(user.getId()); + tenantMapper.deleteById(tenant.getId()); + } + + @Test + public void testQueryUserByToken() { + //insertOne + User user = insertOne(); + //insertOneAccessToken + AccessToken accessToken = insertOneAccessToken(user); + //queryUserByToken + User userToken = userMapper.queryUserByToken(accessToken.getToken()); + Assert.assertEquals(userToken,user); + userMapper.deleteById(user.getId()); + accessTokenMapper.deleteById(accessToken.getId()); + + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapperTest.java new file mode 100644 index 0000000000..910a785fd5 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapperTest.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.mapper; + + +import org.apache.dolphinscheduler.dao.entity.WorkerGroup; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.Date; +import java.util.List; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class WorkerGroupMapperTest { + @Autowired + WorkerGroupMapper workerGroupMapper; + + private WorkerGroup insertOne(){ + //insertOne + WorkerGroup workerGroup = new WorkerGroup(); + + String name = "workerGroup3"; + workerGroup.setName(name); + workerGroup.setIpList("192.168.220.154,192.168.220.188"); + workerGroup.setCreateTime(new Date()); + workerGroup.setUpdateTime(new Date()); + workerGroupMapper.insert(workerGroup); + return workerGroup; + } + + + @Test + public void testUpdate(){ + //insertOne + WorkerGroup workerGroup = insertOne(); + //update + workerGroup.setName("workerGroup11"); + int update = workerGroupMapper.updateById(workerGroup); + Assert.assertEquals(update, 1); + workerGroupMapper.deleteById(workerGroup.getId()); + } + + @Test + public void testDelete(){ + //insertOne + WorkerGroup workerGroup = insertOne(); + //delete + int delete = workerGroupMapper.deleteById(workerGroup.getId()); + Assert.assertEquals(delete, 1); + } + + @Test + public void testQuery() { + //insertOne + WorkerGroup workerGroup = insertOne(); + //query + List workerGroupList = workerGroupMapper.selectList(null); + Assert.assertNotEquals(workerGroupList.size(), 0); + workerGroupMapper.deleteById(workerGroup.getId()); + } + + @Test + public void testQueryAllWorkerGroup() { + //insertOne + WorkerGroup workerGroup = insertOne(); + //queryAllWorkerGroup + List 
workerGroupList = workerGroupMapper.queryAllWorkerGroup(); + Assert.assertNotEquals(workerGroupList.size(), 0); + workerGroupMapper.deleteById(workerGroup.getId()); + } + + @Test + public void testQueryWorkerGroupByName() { + //insertOne + WorkerGroup workerGroup = insertOne(); + //queryWorkerGroupByName + List workerGroupList = workerGroupMapper.queryWorkerGroupByName(workerGroup.getName()); + Assert.assertNotEquals(workerGroupList.size(), 0); + workerGroupMapper.deleteById(workerGroup.getId()); + } + + @Test + public void testQueryListPaging() { + //insertOne + WorkerGroup workerGroup = insertOne(); + //queryListPaging + Page page = new Page(1,3); + IPage workerGroupIPage = workerGroupMapper.queryListPaging(page, workerGroup.getName()); + Assert.assertNotEquals(workerGroupIPage.getTotal(), 0); + workerGroupMapper.deleteById(workerGroup.getId()); + } +} \ No newline at end of file diff --git a/escheduler-dao/src/test/resources/dao/data_source.properties b/dolphinscheduler-dao/src/test/resources/dao/data_source.properties similarity index 100% rename from escheduler-dao/src/test/resources/dao/data_source.properties rename to dolphinscheduler-dao/src/test/resources/dao/data_source.properties diff --git a/dolphinscheduler-rpc/pom.xml b/dolphinscheduler-rpc/pom.xml new file mode 100644 index 0000000000..30567310be --- /dev/null +++ b/dolphinscheduler-rpc/pom.xml @@ -0,0 +1,121 @@ + + + + + org.apache.dolphinscheduler + dolphinscheduler + 1.1.0-SNAPSHOT + + 4.0.0 + + dolphinscheduler-rpc + + dolphinscheduler-rpc + https://github.com/apache/incubator-dolphinscheduler + + + UTF-8 + 1.7 + 1.7 + + ${project.basedir}/src/main/java + 3.5.1 + 1.9.0 + + + + + com.google.protobuf + protobuf-java + ${protobuf.version} + + + io.grpc + grpc-netty + ${grpc.version} + + + io.grpc + grpc-protobuf + ${grpc.version} + + + io.grpc + grpc-stub + ${grpc.version} + + + + com.google.guava + guava + + + + + + + kr.motd.maven + os-maven-plugin + 1.5.0.Final + + + + + 
org.xolstice.maven.plugins + protobuf-maven-plugin + 0.5.0 + + com.google.protobuf:protoc:3.5.1-1:exe:${os.detected.classifier} + grpc-java + io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier} + + + + compile + + compile + + + + compile-custom + + compile-custom + + + ${protobuf.output.directory} + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 1.7 + + + add-classes + generate-sources + + add-source + + + + ${protobuf.output.directory} + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${java.version} + ${java.version} + ${project.build.sourceEncoding} + + + + + diff --git a/dolphinscheduler-rpc/src/main/java/org/apache/dolphinscheduler/rpc/LogViewServiceGrpc.java b/dolphinscheduler-rpc/src/main/java/org/apache/dolphinscheduler/rpc/LogViewServiceGrpc.java new file mode 100644 index 0000000000..4ea4646118 --- /dev/null +++ b/dolphinscheduler-rpc/src/main/java/org/apache/dolphinscheduler/rpc/LogViewServiceGrpc.java @@ -0,0 +1,499 @@ +package org.apache.dolphinscheduler.rpc; + +import static io.grpc.MethodDescriptor.generateFullMethodName; +import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall; +import static io.grpc.stub.ClientCalls.asyncClientStreamingCall; +import static io.grpc.stub.ClientCalls.asyncServerStreamingCall; +import static io.grpc.stub.ClientCalls.asyncUnaryCall; +import static io.grpc.stub.ClientCalls.blockingServerStreamingCall; +import static io.grpc.stub.ClientCalls.blockingUnaryCall; +import static io.grpc.stub.ClientCalls.futureUnaryCall; +import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall; +import static io.grpc.stub.ServerCalls.asyncClientStreamingCall; +import static io.grpc.stub.ServerCalls.asyncServerStreamingCall; +import static io.grpc.stub.ServerCalls.asyncUnaryCall; +import static io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall; +import static io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall; + +/** + *
+ **
+ *  log view service
+ * 
+ */ +@javax.annotation.Generated( + value = "by gRPC proto compiler (version 1.9.0)", + comments = "Source: scheduler.proto") +public final class LogViewServiceGrpc { + + private LogViewServiceGrpc() {} + + public static final String SERVICE_NAME = "schduler.LogViewService"; + + // Static method descriptors that strictly reflect the proto. + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + @java.lang.Deprecated // Use {@link #getRollViewLogMethod()} instead. + public static final io.grpc.MethodDescriptor METHOD_ROLL_VIEW_LOG = getRollViewLogMethod(); + + private static volatile io.grpc.MethodDescriptor getRollViewLogMethod; + + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + public static io.grpc.MethodDescriptor getRollViewLogMethod() { + io.grpc.MethodDescriptor getRollViewLogMethod; + if ((getRollViewLogMethod = LogViewServiceGrpc.getRollViewLogMethod) == null) { + synchronized (LogViewServiceGrpc.class) { + if ((getRollViewLogMethod = LogViewServiceGrpc.getRollViewLogMethod) == null) { + LogViewServiceGrpc.getRollViewLogMethod = getRollViewLogMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName( + "schduler.LogViewService", "rollViewLog")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.dolphinscheduler.rpc.LogParameter.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.dolphinscheduler.rpc.RetStrInfo.getDefaultInstance())) + .setSchemaDescriptor(new LogViewServiceMethodDescriptorSupplier("rollViewLog")) + .build(); + } + } + } + return getRollViewLogMethod; + } + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + @java.lang.Deprecated // Use {@link #getViewLogMethod()} instead. 
+ public static final io.grpc.MethodDescriptor METHOD_VIEW_LOG = getViewLogMethod(); + + private static volatile io.grpc.MethodDescriptor getViewLogMethod; + + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + public static io.grpc.MethodDescriptor getViewLogMethod() { + io.grpc.MethodDescriptor getViewLogMethod; + if ((getViewLogMethod = LogViewServiceGrpc.getViewLogMethod) == null) { + synchronized (LogViewServiceGrpc.class) { + if ((getViewLogMethod = LogViewServiceGrpc.getViewLogMethod) == null) { + LogViewServiceGrpc.getViewLogMethod = getViewLogMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName( + "schduler.LogViewService", "viewLog")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.dolphinscheduler.rpc.PathParameter.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.dolphinscheduler.rpc.RetStrInfo.getDefaultInstance())) + .setSchemaDescriptor(new LogViewServiceMethodDescriptorSupplier("viewLog")) + .build(); + } + } + } + return getViewLogMethod; + } + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + @java.lang.Deprecated // Use {@link #getGetLogBytesMethod()} instead. 
+ public static final io.grpc.MethodDescriptor METHOD_GET_LOG_BYTES = getGetLogBytesMethod(); + + private static volatile io.grpc.MethodDescriptor getGetLogBytesMethod; + + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + public static io.grpc.MethodDescriptor getGetLogBytesMethod() { + io.grpc.MethodDescriptor getGetLogBytesMethod; + if ((getGetLogBytesMethod = LogViewServiceGrpc.getGetLogBytesMethod) == null) { + synchronized (LogViewServiceGrpc.class) { + if ((getGetLogBytesMethod = LogViewServiceGrpc.getGetLogBytesMethod) == null) { + LogViewServiceGrpc.getGetLogBytesMethod = getGetLogBytesMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName( + "schduler.LogViewService", "getLogBytes")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.dolphinscheduler.rpc.PathParameter.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.dolphinscheduler.rpc.RetByteInfo.getDefaultInstance())) + .setSchemaDescriptor(new LogViewServiceMethodDescriptorSupplier("getLogBytes")) + .build(); + } + } + } + return getGetLogBytesMethod; + } + + /** + * Creates a new async stub that supports all call types for the service + */ + public static LogViewServiceStub newStub(io.grpc.Channel channel) { + return new LogViewServiceStub(channel); + } + + /** + * Creates a new blocking-style stub that supports unary and streaming output calls on the service + */ + public static LogViewServiceBlockingStub newBlockingStub( + io.grpc.Channel channel) { + return new LogViewServiceBlockingStub(channel); + } + + /** + * Creates a new ListenableFuture-style stub that supports unary calls on the service + */ + public static LogViewServiceFutureStub newFutureStub( + io.grpc.Channel channel) { + return new LogViewServiceFutureStub(channel); + } + + /** + *
+   **
+   *  log view service
+   * 
+ */ + public static abstract class LogViewServiceImplBase implements io.grpc.BindableService { + + /** + *
+     **
+     *  roll view log
+     * 
+ */ + public void rollViewLog(org.apache.dolphinscheduler.rpc.LogParameter request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnimplementedUnaryCall(getRollViewLogMethod(), responseObserver); + } + + /** + *
+     **
+     * view all log
+     * 
+ */ + public void viewLog(org.apache.dolphinscheduler.rpc.PathParameter request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnimplementedUnaryCall(getViewLogMethod(), responseObserver); + } + + /** + *
+     **
+     * get log bytes
+     * 
+ */ + public void getLogBytes(org.apache.dolphinscheduler.rpc.PathParameter request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnimplementedUnaryCall(getGetLogBytesMethod(), responseObserver); + } + + @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { + return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) + .addMethod( + getRollViewLogMethod(), + asyncUnaryCall( + new MethodHandlers< + org.apache.dolphinscheduler.rpc.LogParameter, + org.apache.dolphinscheduler.rpc.RetStrInfo>( + this, METHODID_ROLL_VIEW_LOG))) + .addMethod( + getViewLogMethod(), + asyncUnaryCall( + new MethodHandlers< + org.apache.dolphinscheduler.rpc.PathParameter, + org.apache.dolphinscheduler.rpc.RetStrInfo>( + this, METHODID_VIEW_LOG))) + .addMethod( + getGetLogBytesMethod(), + asyncUnaryCall( + new MethodHandlers< + org.apache.dolphinscheduler.rpc.PathParameter, + org.apache.dolphinscheduler.rpc.RetByteInfo>( + this, METHODID_GET_LOG_BYTES))) + .build(); + } + } + + /** + *
+   **
+   *  log view service
+   * 
+ */ + public static final class LogViewServiceStub extends io.grpc.stub.AbstractStub { + private LogViewServiceStub(io.grpc.Channel channel) { + super(channel); + } + + private LogViewServiceStub(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected LogViewServiceStub build(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + return new LogViewServiceStub(channel, callOptions); + } + + /** + *
+     **
+     *  roll view log
+     * 
+ */ + public void rollViewLog(org.apache.dolphinscheduler.rpc.LogParameter request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnaryCall( + getChannel().newCall(getRollViewLogMethod(), getCallOptions()), request, responseObserver); + } + + /** + *
+     **
+     * view all log
+     * 
+ */ + public void viewLog(org.apache.dolphinscheduler.rpc.PathParameter request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnaryCall( + getChannel().newCall(getViewLogMethod(), getCallOptions()), request, responseObserver); + } + + /** + *
+     **
+     * get log bytes
+     * 
+ */ + public void getLogBytes(org.apache.dolphinscheduler.rpc.PathParameter request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnaryCall( + getChannel().newCall(getGetLogBytesMethod(), getCallOptions()), request, responseObserver); + } + } + + /** + *
+   **
+   *  log view service
+   * 
+ */ + public static final class LogViewServiceBlockingStub extends io.grpc.stub.AbstractStub { + private LogViewServiceBlockingStub(io.grpc.Channel channel) { + super(channel); + } + + private LogViewServiceBlockingStub(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected LogViewServiceBlockingStub build(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + return new LogViewServiceBlockingStub(channel, callOptions); + } + + /** + *
+     **
+     *  roll view log
+     * 
+ */ + public org.apache.dolphinscheduler.rpc.RetStrInfo rollViewLog(org.apache.dolphinscheduler.rpc.LogParameter request) { + return blockingUnaryCall( + getChannel(), getRollViewLogMethod(), getCallOptions(), request); + } + + /** + *
+     **
+     * view all log
+     * 
+ */ + public org.apache.dolphinscheduler.rpc.RetStrInfo viewLog(org.apache.dolphinscheduler.rpc.PathParameter request) { + return blockingUnaryCall( + getChannel(), getViewLogMethod(), getCallOptions(), request); + } + + /** + *
+     **
+     * get log bytes
+     * 
+ */ + public org.apache.dolphinscheduler.rpc.RetByteInfo getLogBytes(org.apache.dolphinscheduler.rpc.PathParameter request) { + return blockingUnaryCall( + getChannel(), getGetLogBytesMethod(), getCallOptions(), request); + } + } + + /** + *
+   **
+   *  log view service
+   * 
+ */ + public static final class LogViewServiceFutureStub extends io.grpc.stub.AbstractStub { + private LogViewServiceFutureStub(io.grpc.Channel channel) { + super(channel); + } + + private LogViewServiceFutureStub(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected LogViewServiceFutureStub build(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + return new LogViewServiceFutureStub(channel, callOptions); + } + + /** + *
+     **
+     *  roll view log
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture rollViewLog( + org.apache.dolphinscheduler.rpc.LogParameter request) { + return futureUnaryCall( + getChannel().newCall(getRollViewLogMethod(), getCallOptions()), request); + } + + /** + *
+     **
+     * view all log
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture viewLog( + org.apache.dolphinscheduler.rpc.PathParameter request) { + return futureUnaryCall( + getChannel().newCall(getViewLogMethod(), getCallOptions()), request); + } + + /** + *
+     **
+     * get log bytes
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture getLogBytes( + org.apache.dolphinscheduler.rpc.PathParameter request) { + return futureUnaryCall( + getChannel().newCall(getGetLogBytesMethod(), getCallOptions()), request); + } + } + + private static final int METHODID_ROLL_VIEW_LOG = 0; + private static final int METHODID_VIEW_LOG = 1; + private static final int METHODID_GET_LOG_BYTES = 2; + + private static final class MethodHandlers implements + io.grpc.stub.ServerCalls.UnaryMethod, + io.grpc.stub.ServerCalls.ServerStreamingMethod, + io.grpc.stub.ServerCalls.ClientStreamingMethod, + io.grpc.stub.ServerCalls.BidiStreamingMethod { + private final LogViewServiceImplBase serviceImpl; + private final int methodId; + + MethodHandlers(LogViewServiceImplBase serviceImpl, int methodId) { + this.serviceImpl = serviceImpl; + this.methodId = methodId; + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + case METHODID_ROLL_VIEW_LOG: + serviceImpl.rollViewLog((org.apache.dolphinscheduler.rpc.LogParameter) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_VIEW_LOG: + serviceImpl.viewLog((org.apache.dolphinscheduler.rpc.PathParameter) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_GET_LOG_BYTES: + serviceImpl.getLogBytes((org.apache.dolphinscheduler.rpc.PathParameter) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + default: + throw new AssertionError(); + } + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public io.grpc.stub.StreamObserver invoke( + io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + default: + throw new AssertionError(); + } + } + } + + private static abstract class LogViewServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoFileDescriptorSupplier, 
io.grpc.protobuf.ProtoServiceDescriptorSupplier { + LogViewServiceBaseDescriptorSupplier() {} + + @java.lang.Override + public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { + return org.apache.dolphinscheduler.rpc.SchdulerProto.getDescriptor(); + } + + @java.lang.Override + public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { + return getFileDescriptor().findServiceByName("LogViewService"); + } + } + + private static final class LogViewServiceFileDescriptorSupplier + extends LogViewServiceBaseDescriptorSupplier { + LogViewServiceFileDescriptorSupplier() {} + } + + private static final class LogViewServiceMethodDescriptorSupplier + extends LogViewServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { + private final String methodName; + + LogViewServiceMethodDescriptorSupplier(String methodName) { + this.methodName = methodName; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { + return getServiceDescriptor().findMethodByName(methodName); + } + } + + private static volatile io.grpc.ServiceDescriptor serviceDescriptor; + + public static io.grpc.ServiceDescriptor getServiceDescriptor() { + io.grpc.ServiceDescriptor result = serviceDescriptor; + if (result == null) { + synchronized (LogViewServiceGrpc.class) { + result = serviceDescriptor; + if (result == null) { + serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) + .setSchemaDescriptor(new LogViewServiceFileDescriptorSupplier()) + .addMethod(getRollViewLogMethod()) + .addMethod(getViewLogMethod()) + .addMethod(getGetLogBytesMethod()) + .build(); + } + } + } + return result; + } +} diff --git a/dolphinscheduler-rpc/src/main/proto/scheduler.proto b/dolphinscheduler-rpc/src/main/proto/scheduler.proto new file mode 100644 index 0000000000..e1c5664b07 --- /dev/null +++ b/dolphinscheduler-rpc/src/main/proto/scheduler.proto @@ -0,0 +1,83 @@ +syntax 
= "proto3"; + +package schduler; + +option java_multiple_files = true; +option java_package = "org.apache.dolphinscheduler.rpc"; +option java_outer_classname = "SchdulerProto"; + + +/** + * return str info + */ +message RetStrInfo { + /** + * str msg info + */ + string msg = 1 ; +} + +/** + * return byte info + */ +message RetByteInfo { + /** + * byte data info + */ + bytes data = 1; +} + +/** + * log parameter + */ +message LogParameter { + + /** + * path + */ + string path = 1 ; + + /** + * skip line num + */ + int32 skipLineNum = 2 ; + + /** + * display limt num + */ + int32 limit = 3 ; +} + + +/** + * path parameter + */ +message PathParameter { + + /** + * path + */ + string path = 1 ; +} + +/** + * log view service + */ +service LogViewService { + + /** + * roll view log + */ + rpc rollViewLog(LogParameter) returns (RetStrInfo) {}; + + /** + * view all log + */ + rpc viewLog(PathParameter) returns (RetStrInfo) {}; + + /** + * get log bytes + */ + rpc getLogBytes(PathParameter) returns (RetByteInfo) {}; +} + diff --git a/dolphinscheduler-server/pom.xml b/dolphinscheduler-server/pom.xml new file mode 100644 index 0000000000..36906f0cf2 --- /dev/null +++ b/dolphinscheduler-server/pom.xml @@ -0,0 +1,127 @@ + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler + 1.1.0-SNAPSHOT + + dolphinscheduler-server + dolphinscheduler-server + http://maven.apache.org + jar + + UTF-8 + + + + + org.apache.dolphinscheduler + dolphinscheduler-common + + + protobuf-java + com.google.protobuf + + + io.netty + netty + + + io.netty + netty-all + + + com.google + netty + + + log4j-slf4j-impl + org.apache.logging.log4j + + + + + org.apache.dolphinscheduler + dolphinscheduler-dao + + + spring-boot-starter-logging + org.springframework.boot + + + + + + org.apache.dolphinscheduler + dolphinscheduler-rpc + + + org.apache.curator + curator-framework + + + org.apache.zookeeper + zookeeper + + + + + org.apache.curator + curator-recipes + + + + org.apache.httpcomponents + httpclient + + 
+ org.apache.httpcomponents + httpcore + + + junit + junit + test + + + + org.apache.dolphinscheduler + dolphinscheduler-alert + + + + + + + + + maven-assembly-plugin + + + src/main/assembly/package.xml + + false + + + + make-assembly + package + + single + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${java.version} + ${java.version} + ${project.build.sourceEncoding} + + + + + + diff --git a/dolphinscheduler-server/src/main/assembly/package.xml b/dolphinscheduler-server/src/main/assembly/package.xml new file mode 100644 index 0000000000..7b4f5fd583 --- /dev/null +++ b/dolphinscheduler-server/src/main/assembly/package.xml @@ -0,0 +1,74 @@ + + cluster + + dir + + false + + + src/main/resources + + **/*.properties + **/*.xml + **/*.json + + conf + + + ${project.parent.basedir}/dolphinscheduler-common/src/main/resources + + **/*.properties + **/*.xml + **/*.json + + conf + + + ${project.parent.basedir}/dolphinscheduler-common/src/main/resources/bin + + *.* + + 755 + bin + + + ${project.parent.basedir}/dolphinscheduler-dao/src/main/resources + + **/*.properties + **/*.xml + **/*.json + + conf + + + ${project.parent.basedir}/dolphinscheduler-api/src/main/resources + + **/*.properties + **/*.xml + **/*.json + + conf + + + target/ + + dolphinscheduler-server-${project.version}.jar + + lib + + + + + lib + true + + javax.servlet:servlet-api + org.eclipse.jetty.aggregate:jetty-all + org.slf4j:slf4j-log4j12 + + + + \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/AbstractServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/AbstractServer.java new file mode 100644 index 0000000000..60948ad1b6 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/AbstractServer.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master; + +import org.apache.dolphinscheduler.common.IStoppable; +import org.apache.commons.configuration.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.boot.CommandLineRunner; +import org.springframework.context.annotation.ComponentScan; + +/** + * master server + */ +@ComponentScan("cn.escheduler") +public abstract class AbstractServer implements CommandLineRunner, IStoppable { + + private static final Logger logger = LoggerFactory.getLogger(AbstractServer.class); + + /** + * conf + */ + protected static Configuration conf; + + /** + * object lock + */ + protected final Object lock = new Object(); + + /** + * whether or not to close the state + */ + protected boolean terminated = false; + + + /** + * heartbeat interval, unit second + */ + protected int heartBeatInterval; + + + + /** + * blocking implement + * @throws InterruptedException + */ + public void awaitTermination() throws InterruptedException { + synchronized (lock) { + while (!terminated) { + lock.wait(); + } + } + } + + + /** + * Callback used to run the bean. + * @param args incoming main method arguments + * @throws Exception on error + */ + @Override + public abstract void run(String... 
args) throws Exception; + + /** + * gracefully stop + * @param cause why stopping + */ + @Override + public abstract void stop(String cause); +} + diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java new file mode 100644 index 0000000000..45ca9f625f --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java @@ -0,0 +1,275 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.master; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.server.master.runner.MasterSchedulerThread; +import org.apache.dolphinscheduler.server.quartz.ProcessScheduleJob; +import org.apache.dolphinscheduler.server.quartz.QuartzExecutors; +import org.apache.dolphinscheduler.server.zk.ZKMasterClient; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.commons.lang3.StringUtils; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.SpringApplication; +import org.springframework.context.annotation.ComponentScan; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +/** + * master server + */ +@ComponentScan("cn.escheduler") +public class MasterServer extends AbstractServer { + + private static final Logger logger = LoggerFactory.getLogger(MasterServer.class); + + /** + * zk master client + */ + private static ZKMasterClient zkMasterClient = null; + + /** + * heartbeat thread pool + */ + private ScheduledExecutorService heartbeatMasterService; + + /** + * escheduler database interface + */ + @Autowired + protected ProcessDao processDao; + + /** + * master exec thread pool + */ + private ExecutorService masterSchedulerService; + + public MasterServer(){} + + public MasterServer(ProcessDao processDao){ + try { + conf = new 
PropertiesConfiguration(Constants.MASTER_PROPERTIES_PATH); + }catch (ConfigurationException e){ + logger.error("load configuration failed : " + e.getMessage(),e); + System.exit(1); + } + zkMasterClient = ZKMasterClient.getZKMasterClient(processDao); + this.masterSchedulerService = ThreadUtils.newDaemonSingleThreadExecutor("Master-Scheduler-Thread"); + } + + + /** + * master server startup + * + * master server not use web service + */ + public static void main(String[] args) { + SpringApplication app = new SpringApplication(MasterServer.class); + + app.run(args); + } + + + @Override + public void run(String... strings) throws Exception { + + MasterServer masterServer = new MasterServer(processDao); + + masterServer.run(processDao); + + logger.info("master server started"); + // blocking + masterServer.awaitTermination(); + } + + + public void run(ProcessDao processDao){ + + // heartbeat interval + heartBeatInterval = conf.getInt(Constants.MASTER_HEARTBEAT_INTERVAL, + Constants.defaultMasterHeartbeatInterval); + + // master exec thread pool num + int masterExecThreadNum = conf.getInt(Constants.MASTER_EXEC_THREADS, + Constants.defaultMasterExecThreadNum); + + + heartbeatMasterService = ThreadUtils.newDaemonThreadScheduledExecutor("Master-Main-Thread",Constants.defaulMasterHeartbeatThreadNum); + + // heartbeat thread implement + Runnable heartBeatThread = heartBeatThread(); + + zkMasterClient.setStoppable(this); + + // regular heartbeat + // delay 5 seconds, send heartbeat every 30 seconds + heartbeatMasterService. 
+ scheduleAtFixedRate(heartBeatThread, 5, heartBeatInterval, TimeUnit.SECONDS); + + // master scheduler thread + MasterSchedulerThread masterSchedulerThread = new MasterSchedulerThread( + zkMasterClient, + processDao,conf, + masterExecThreadNum); + + // submit master scheduler thread + masterSchedulerService.execute(masterSchedulerThread); + + // start QuartzExecutors + // what system should do if exception + try { + ProcessScheduleJob.init(processDao); + QuartzExecutors.getInstance().start(); + } catch (Exception e) { + try { + QuartzExecutors.getInstance().shutdown(); + } catch (SchedulerException e1) { + logger.error("QuartzExecutors shutdown failed : " + e1.getMessage(), e1); + } + logger.error("start Quartz failed : " + e.getMessage(), e); + } + + + /** + * register hooks, which are called before the process exits + */ + Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { + @Override + public void run() { + logger.info("master server stopped"); + if (zkMasterClient.getActiveMasterNum() <= 1) { + for (int i = 0; i < Constants.ESCHEDULER_WARN_TIMES_FAILOVER;i++) { + zkMasterClient.getAlertDao().sendServerStopedAlert( + 1, OSUtils.getHost(), "Master-Server"); + } + } + } + })); + } + + + /** + * gracefully stop + * @param cause why stopping + */ + @Override + public synchronized void stop(String cause) { + + try { + //execute only once + if(Stopper.isStoped()){ + return; + } + + logger.info("master server is stopping ..., cause : {}", cause); + + // set stop signal is true + Stopper.stop(); + + try { + //thread sleep 3 seconds for thread quitely stop + Thread.sleep(3000L); + }catch (Exception e){ + logger.warn("thread sleep exception:" + e.getMessage(), e); + } + try { + heartbeatMasterService.shutdownNow(); + }catch (Exception e){ + logger.warn("heartbeat service stopped exception"); + } + + logger.info("heartbeat service stopped"); + + //close quartz + try{ + QuartzExecutors.getInstance().shutdown(); + }catch (Exception e){ + logger.warn("Quartz 
service stopped exception:{}",e.getMessage()); + } + + logger.info("Quartz service stopped"); + + try { + ThreadPoolExecutors.getInstance().shutdown(); + }catch (Exception e){ + logger.warn("threadpool service stopped exception:{}",e.getMessage()); + } + + logger.info("threadpool service stopped"); + + try { + masterSchedulerService.shutdownNow(); + }catch (Exception e){ + logger.warn("master scheduler service stopped exception:{}",e.getMessage()); + } + + logger.info("master scheduler service stopped"); + + try { + zkMasterClient.close(); + }catch (Exception e){ + logger.warn("zookeeper service stopped exception:{}",e.getMessage()); + } + + logger.info("zookeeper service stopped"); + + synchronized (lock) { + terminated = true; + lock.notifyAll(); + } + + } catch (Exception e) { + logger.error("master server stop exception : " + e.getMessage(), e); + System.exit(-1); + } + } + + + /** + * heartbeat thread implement + * @return + */ + private Runnable heartBeatThread(){ + Runnable heartBeatThread = new Runnable() { + @Override + public void run() { + if(Stopper.isRunning()) { + // send heartbeat to zk + if (StringUtils.isBlank(zkMasterClient.getMasterZNode())) { + logger.error("master send heartbeat to zk failed: can't find zookeeper path of master server"); + return; + } + + zkMasterClient.heartBeatForZk(zkMasterClient.getMasterZNode(), Constants.MASTER_PREFIX); + } + } + }; + return heartBeatThread; + } +} + diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/log/MasterLogFilter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/log/MasterLogFilter.java new file mode 100644 index 0000000000..6f2ccf291d --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/log/MasterLogFilter.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.log; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.filter.Filter; +import ch.qos.logback.core.spi.FilterReply; + +/** + * master log filter + */ +public class MasterLogFilter extends Filter { + + Level level; + + @Override + public FilterReply decide(ILoggingEvent event) { + if (event.getThreadName().startsWith("Master-")){ + return FilterReply.ACCEPT; + } + return FilterReply.DENY; + } + + public void setLevel(String level) { + this.level = Level.toLevel(level); + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java new file mode 100644 index 0000000000..9e856b39cd --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.runner; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.queue.ITaskQueue; +import org.apache.dolphinscheduler.common.queue.TaskQueueFactory; +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.utils.BeanContext; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.Callable; + +/** + * master task exec base class + */ +public class MasterBaseTaskExecThread implements Callable { + + private static final Logger logger = LoggerFactory.getLogger(MasterBaseTaskExecThread.class); + + /** + * process dao + */ + protected ProcessDao processDao; + + /** + * alert database access + */ + protected AlertDao alertDao; + + /** + * process instance + */ + protected ProcessInstance processInstance; + + /** + * task instance + */ + protected TaskInstance 
taskInstance; + + /** + * task queue + */ + protected ITaskQueue taskQueue; + protected boolean cancel; + + /** + * load configuration file + */ + private static Configuration conf; + + static { + try { + conf = new PropertiesConfiguration(Constants.MASTER_PROPERTIES_PATH); + } catch (ConfigurationException e) { + logger.error(e.getMessage(), e); + System.exit(1); + } + } + + public MasterBaseTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){ + this.processDao = BeanContext.getBean(ProcessDao.class); + this.alertDao = BeanContext.getBean(AlertDao.class); + this.processInstance = processInstance; + this.taskQueue = TaskQueueFactory.getTaskQueueInstance(); + this.cancel = false; + this.taskInstance = taskInstance; + } + + public TaskInstance getTaskInstance(){ + return this.taskInstance; + } + + public void kill(){ + this.cancel = true; + } + + protected TaskInstance submit(){ + Integer commitRetryTimes = conf.getInt(Constants.MASTER_COMMIT_RETRY_TIMES, + Constants.defaultMasterCommitRetryTimes); + Integer commitRetryInterval = conf.getInt(Constants.MASTER_COMMIT_RETRY_INTERVAL, + Constants.defaultMasterCommitRetryInterval); + + int retryTimes = 1; + + while (retryTimes <= commitRetryTimes){ + try { + TaskInstance task = processDao.submitTask(taskInstance, processInstance); + if(task != null){ + return task; + } + logger.error("task commit to mysql and queue failed , task has already retry {} times, please check the database", commitRetryTimes); + Thread.sleep(commitRetryInterval); + } catch (Exception e) { + logger.error("task commit to mysql and queue failed : " + e.getMessage(),e); + } + retryTimes += 1; + } + return null; + } + + protected Boolean submitWaitComplete(){ + return true; + } + + @Override + public Boolean call() throws Exception { + return submitWaitComplete(); + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java new file mode 100644 index 0000000000..3e7c3a7108 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java @@ -0,0 +1,1042 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.master.runner; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.graph.DAG; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.model.TaskNodeRelation; +import org.apache.dolphinscheduler.common.process.ProcessDag; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.utils.DagHelper; +import org.apache.dolphinscheduler.server.utils.AlertManager; +import com.alibaba.fastjson.JSONObject; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.dolphinscheduler.common.utils.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * master exec thread,split dag + */ +public class MasterExecThread implements Runnable { + + private static final Logger logger = LoggerFactory.getLogger(MasterExecThread.class); + + /** + * process instance + */ + private ProcessInstance processInstance; + + + /** + * runing TaskNode + */ + private final Map> activeTaskNode = new ConcurrentHashMap>(); + + private final ExecutorService taskExecService; + 
+ /** + * submit failure nodes + */ + private Boolean taskFailedSubmit = false; + private List recoverNodeIdList = new ArrayList<>(); + private Map errorTaskList = new ConcurrentHashMap<>(); + private Map completeTaskList = new ConcurrentHashMap<>(); + private Map readyToSubmitTaskList = new ConcurrentHashMap<>(); + private Map dependFailedTask = new ConcurrentHashMap<>(); + private Map forbiddenTaskList = new ConcurrentHashMap<>(); + private List recoverToleranceFaultTaskList = new ArrayList<>(); + + private AlertManager alertManager = new AlertManager(); + + private DAG dag; + + /** + * process dao + */ + private ProcessDao processDao; + + /** + * load configuration file + */ + private static Configuration conf; + + public MasterExecThread(ProcessInstance processInstance){ + this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); + + this.processInstance = processInstance; + + int masterTaskExecNum = conf.getInt(Constants.MASTER_EXEC_TASK_THREADS, + Constants.defaultMasterTaskExecNum); + this.taskExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Task-Exec-Thread", + masterTaskExecNum); + } + + + static { + try { + conf = new PropertiesConfiguration(Constants.MASTER_PROPERTIES_PATH); + }catch (ConfigurationException e){ + logger.error("load configuration failed : " + e.getMessage(),e); + System.exit(1); + } + } + + @Override + public void run() { + + // process instance is null + if (processInstance == null){ + logger.info("process instance is not exists"); + return; + } + + // check to see if it's done + if (processInstance.getState().typeIsFinished()){ + logger.info("process instance is done : {}",processInstance.getId()); + return; + } + + try { + if (processInstance.isComplementData() && Flag.NO == processInstance.getIsSubProcess()){ + // sub process complement data + executeComplementProcess(); + }else{ + // execute flow + executeProcess(); + } + }catch (Exception e){ + logger.error("master exec thread exception: " + e.getMessage(), e); + 
logger.error("process execute failed, process id:{}", processInstance.getId()); + processInstance.setState(ExecutionStatus.FAILURE); + processInstance.setEndTime(new Date()); + processDao.updateProcessInstance(processInstance); + }finally { + taskExecService.shutdown(); + // post handle + postHandle(); + } + } + + private void executeProcess() throws Exception { + prepareProcess(); + runProcess(); + endProcess(); + } + + /** + * execute complement process + * @throws Exception + */ + private void executeComplementProcess() throws Exception { + + Map cmdParam = JSONUtils.toMap(processInstance.getCommandParam()); + + Date startDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); + Date endDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); + processDao.saveProcessInstance(processInstance); + Date scheduleDate = processInstance.getScheduleTime(); + + if(scheduleDate == null){ + scheduleDate = startDate; + } + + while(Stopper.isRunning()){ + // prepare dag and other info + prepareProcess(); + + if(dag == null){ + logger.error("process {} dag is null, please check out parameters", + processInstance.getId()); + processInstance.setState(ExecutionStatus.SUCCESS); + processDao.updateProcessInstance(processInstance); + return; + } + + // execute process ,waiting for end + runProcess(); + + // process instace failure ,no more complements + if(!processInstance.getState().typeIsSuccess()){ + logger.info("process {} state {}, complement not completely!", + processInstance.getId(), processInstance.getState()); + break; + } + + // current process instance sucess ,next execute + scheduleDate = DateUtils.getSomeDay(scheduleDate, 1); + if(scheduleDate.after(endDate)){ + // all success + logger.info("process {} complement completely!", processInstance.getId()); + break; + } + + logger.info("process {} start to complement {} data", + processInstance.getId(), DateUtils.dateToString(scheduleDate)); + // execute next process 
instance complement data + processInstance.setScheduleTime(scheduleDate); + if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)){ + cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); + processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); + } + + List taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId()); + for(TaskInstance taskInstance : taskInstanceList){ + taskInstance.setFlag(Flag.NO); + processDao.updateTaskInstance(taskInstance); + } + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( + processInstance.getProcessDefinition().getGlobalParamMap(), + processInstance.getProcessDefinition().getGlobalParamList(), + CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); + + processDao.saveProcessInstance(processInstance); + } + + // flow end + endProcess(); + + } + + + /** + * prepare process parameter + * @throws Exception + */ + private void prepareProcess() throws Exception { + // init task queue + initTaskQueue(); + + // gen process dag + buildFlowDag(); + logger.info("prepare process :{} end", processInstance.getId()); + } + + + /** + * process end handle + */ + private void endProcess() { + processInstance.setEndTime(new Date()); + processDao.updateProcessInstance(processInstance); + if(processInstance.getState().typeIsWaittingThread()){ + processDao.createRecoveryWaitingThreadCommand(null, processInstance); + } + List taskInstances = processDao.findValidTaskListByProcessId(processInstance.getId()); + alertManager.sendAlertProcessInstance(processInstance, taskInstances); + } + + + /** + * generate process dag + * @throws Exception + */ + private void buildFlowDag() throws Exception { + recoverNodeIdList = getStartTaskInstanceList(processInstance.getCommandParam()); + + forbiddenTaskList = DagHelper.getForbiddenTaskNodeMaps(processInstance.getProcessInstanceJson()); + // generate process to get DAG info + 
List recoveryNameList = getRecoveryNodeNameList(); + List startNodeNameList = parseStartNodeName(processInstance.getCommandParam()); + ProcessDag processDag = generateFlowDag(processInstance.getProcessInstanceJson(), + startNodeNameList, recoveryNameList, processInstance.getTaskDependType()); + if(processDag == null){ + //TODO... + logger.error("processDag is null"); + return; + } + // generate process dag + dag = DagHelper.buildDagGraph(processDag); + + } + + private void initTaskQueue(){ + + taskFailedSubmit = false; + activeTaskNode.clear(); + dependFailedTask.clear(); + completeTaskList.clear(); + errorTaskList.clear(); + List taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId()); + for(TaskInstance task : taskInstanceList){ + if(task.isTaskComplete()){ + completeTaskList.put(task.getName(), task); + } + if(task.getState().typeIsFailure() && !task.taskCanRetry()){ + errorTaskList.put(task.getName(), task); + } + } + } + + /** + * process post handle + */ + private void postHandle() { + logger.info("develop mode is: {}", CommonUtils.isDevelopMode()); + + if (!CommonUtils.isDevelopMode()) { + // get exec dir + String execLocalPath = org.apache.dolphinscheduler.common.utils.FileUtils + .getProcessExecDir(processInstance.getProcessDefinition().getProjectId(), + processInstance.getProcessDefinitionId(), + processInstance.getId()); + + try { + FileUtils.deleteDirectory(new File(execLocalPath)); + } catch (IOException e) { + logger.error("delete exec dir failed : " + e.getMessage(), e); + } + } + } + + + + /** + * submit task to execute + * @param taskInstance + */ + private TaskInstance submitTaskExec(TaskInstance taskInstance) { + MasterBaseTaskExecThread abstractExecThread = null; + if(taskInstance.isSubProcess()){ + abstractExecThread = new SubProcessTaskExecThread(taskInstance, processInstance); + }else { + abstractExecThread = new MasterTaskExecThread(taskInstance, processInstance); + } + Future future = 
taskExecService.submit(abstractExecThread); + activeTaskNode.putIfAbsent(abstractExecThread, future); + return abstractExecThread.getTaskInstance(); + } + + /** + * find task instance in db. + * in case submit more than one same name task in the same time. + * @param taskName + * @return + */ + private TaskInstance findTaskIfExists(String taskName){ + List taskInstanceList = processDao.findValidTaskListByProcessId(this.processInstance.getId()); + for(TaskInstance taskInstance : taskInstanceList){ + if(taskInstance.getName().equals(taskName)){ + return taskInstance; + } + } + return null; + } + + /** + * encapsulation task + * @param processInstance + * @param nodeName + * @return + */ + private TaskInstance createTaskInstance(ProcessInstance processInstance, String nodeName, + TaskNode taskNode, String parentNodeName) { + + TaskInstance taskInstance = findTaskIfExists(nodeName); + if(taskInstance == null){ + taskInstance = new TaskInstance(); + // task name + taskInstance.setName(nodeName); + // process instance define id + taskInstance.setProcessDefinitionId(processInstance.getProcessDefinitionId()); + // task instance state + taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); + // process instance id + taskInstance.setProcessInstanceId(processInstance.getId()); + // task instance node json + taskInstance.setTaskJson(JSONObject.toJSONString(taskNode)); + // task instance type + taskInstance.setTaskType(taskNode.getType()); + // task instance whether alert + taskInstance.setAlertFlag(Flag.NO); + + // task instance start time + taskInstance.setStartTime(new Date()); + + // task instance flag + taskInstance.setFlag(Flag.YES); + + // task instance retry times + taskInstance.setRetryTimes(0); + + // max task instance retry times + taskInstance.setMaxRetryTimes(taskNode.getMaxRetryTimes()); + + // retry task instance interval + taskInstance.setRetryInterval(taskNode.getRetryInterval()); + + // task instance priority + if(taskNode.getTaskInstancePriority() == 
null){ + taskInstance.setTaskInstancePriority(Priority.MEDIUM); + }else{ + taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority()); + } + + int workerGroupId = taskNode.getWorkerGroupId(); + taskInstance.setWorkerGroupId(workerGroupId); + + } + return taskInstance; + } + + + + /** + * get post task instance by node + * + * @param dag + * @param parentNodeName + * @return + */ + private List getPostTaskInstanceByNode(DAG dag, String parentNodeName){ + + List postTaskList = new ArrayList<>(); + Collection startVertex = DagHelper.getStartVertex(parentNodeName, dag, completeTaskList); + if(startVertex == null){ + return postTaskList; + } + + for (String nodeName : startVertex){ + // encapsulation task instance + TaskInstance taskInstance = createTaskInstance(processInstance, nodeName , + dag.getNode(nodeName),parentNodeName); + postTaskList.add(taskInstance); + } + return postTaskList; + } + + /** + * + * return start task node list + * + * @return + */ + private List getStartSubmitTaskList(){ + + List startTaskList = getPostTaskInstanceByNode(dag, null); + + HashMap successTaskMaps = new HashMap<>(); + List resultList = new ArrayList<>(); + while(Stopper.isRunning()){ + for(TaskInstance task : startTaskList){ + if(task.getState().typeIsSuccess()){ + successTaskMaps.put(task.getName(), task); + }else if(!completeTaskList.containsKey(task.getName()) && !errorTaskList.containsKey(task.getName())){ + resultList.add(task); + } + } + startTaskList.clear(); + if(successTaskMaps.size() == 0){ + break; + } + + Set taskNameKeys = successTaskMaps.keySet(); + for(String taskName : taskNameKeys){ + startTaskList.addAll(getPostTaskInstanceByNode(dag, taskName)); + } + successTaskMaps.clear(); + } + return resultList; + } + + /** + * submit post node + * @param parentNodeName + */ + private void submitPostNode(String parentNodeName){ + + List submitTaskList = null; + if(parentNodeName == null){ + submitTaskList = getStartSubmitTaskList(); + }else{ + submitTaskList = 
getPostTaskInstanceByNode(dag, parentNodeName); + } + // if the previous node succeeded, submit its post nodes + for(TaskInstance task : submitTaskList){ + if(readyToSubmitTaskList.containsKey(task.getName())){ + continue; + } + + if(completeTaskList.containsKey(task.getName())){ + logger.info("task {} has already run success", task.getName()); + continue; + } + if(task.getState().typeIsPause() || task.getState().typeIsCancel()){ + logger.info("task {} stopped, the state is {}", task.getName(), task.getState().toString()); + }else{ + addTaskToStandByList(task); + } + } + } + + /** + * determine whether the dependencies of the task node are complete + * @return + */ + private DependResult isTaskDepsComplete(String taskName) { + + Collection startNodes = dag.getBeginNode(); + // if the task is a start vertex of the DAG, its dependencies are complete + if(startNodes.contains(taskName)){ + return DependResult.SUCCESS; + } + + TaskNode taskNode = dag.getNode(taskName); + List depsNameList = taskNode.getDepList(); + for(String depsNode : depsNameList ){ + + if(forbiddenTaskList.containsKey(depsNode)){ + continue; + } + // dependencies must be fully completed + if(!completeTaskList.containsKey(depsNode)){ + return DependResult.WAITING; + } + ExecutionStatus taskState = completeTaskList.get(depsNode).getState(); + if(taskState.typeIsFailure()){ + return DependResult.FAILED; + } + if(taskState.typeIsPause() || taskState.typeIsCancel()){ + return DependResult.WAITING; + } + } + + logger.info("taskName: {} completeDependTaskList: {}", taskName, Arrays.toString(completeTaskList.keySet().toArray())); + + return DependResult.SUCCESS; + } + + + /** + * query task instance by complete state + * @param state + * @return + */ + private List getCompleteTaskByState(ExecutionStatus state){ + List resultList = new ArrayList<>(); + Set taskList = completeTaskList.keySet(); + for(String taskName : taskList){ + TaskInstance taskInstance = completeTaskList.get(taskName); + if(taskInstance.getState() == state){ + 
resultList.add(taskInstance); + } + } + return resultList; + } + + /** + * whether there are ongoing tasks + * @param state + * @return + */ + private ExecutionStatus runningState(ExecutionStatus state){ + if(state == ExecutionStatus.READY_STOP || + state == ExecutionStatus.READY_PAUSE || + state == ExecutionStatus.WAITTING_THREAD){ + // if the running task is not completed, the state remains unchanged + return state; + }else{ + return ExecutionStatus.RUNNING_EXEUTION; + } + } + + /** + * whether a failed task exists: submit failure, dependency failure, or execute failure (after retries) + * + * @return + */ + private Boolean hasFailedTask(){ + + if(this.taskFailedSubmit){ + return true; + } + if(this.errorTaskList.size() > 0){ + return true; + } + return this.dependFailedTask.size() > 0; + } + + /** + * process instance failure + * + * @return + */ + private Boolean processFailed(){ + if(hasFailedTask()) { + if(processInstance.getFailureStrategy() == FailureStrategy.END){ + return true; + } + if (processInstance.getFailureStrategy() == FailureStrategy.CONTINUE) { + return readyToSubmitTaskList.size() == 0 || activeTaskNode.size() == 0; + } + } + return false; + } + + /** + * whether task for waiting thread + * @return + */ + private Boolean hasWaitingThreadTask(){ + + List waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD); + return waitingList.size() > 0; + } + + /** + * prepare for pause + * 1. a failed retry task exists in the preparation queue: return failure directly + * 2. a paused task exists, complement is not completed, or tasks are pending submission: return pause + * 3. otherwise: success + * @return + */ + private ExecutionStatus processReadyPause(){ + if(hasRetryTaskInStandBy()){ + return ExecutionStatus.FAILURE; + } + + List pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE); + if(pauseList.size() > 0 + || !isComplementEnd() + || readyToSubmitTaskList.size() > 0){ + return ExecutionStatus.PAUSE; + }else{ + return ExecutionStatus.SUCCESS; + } + } + + + 
/** + * generate the latest process instance status by the tasks state + * @return + */ + private ExecutionStatus getProcessInstanceState(){ + ProcessInstance instance = processDao.findProcessInstanceById(processInstance.getId()); + ExecutionStatus state = instance.getState(); + + if(activeTaskNode.size() > 0){ + return runningState(state); + } + // process failure + if(processFailed()){ + return ExecutionStatus.FAILURE; + } + + // waiting thread + if(hasWaitingThreadTask()){ + return ExecutionStatus.WAITTING_THREAD; + } + + // pause + if(state == ExecutionStatus.READY_PAUSE){ + return processReadyPause(); + } + + // stop + if(state == ExecutionStatus.READY_STOP){ + List stopList = getCompleteTaskByState(ExecutionStatus.STOP); + List killList = getCompleteTaskByState(ExecutionStatus.KILL); + if(stopList.size() > 0 || killList.size() > 0 || !isComplementEnd()){ + return ExecutionStatus.STOP; + }else{ + return ExecutionStatus.SUCCESS; + } + } + + // success + if(state == ExecutionStatus.RUNNING_EXEUTION){ + if(readyToSubmitTaskList.size() > 0){ + //tasks currently pending submission, no retries, indicating that depend is waiting to complete + return ExecutionStatus.RUNNING_EXEUTION; + }else{ + // if the waiting queue is empty and the status is in progress, then success + return ExecutionStatus.SUCCESS; + } + } + + return state; + } + + /** + * whether complement end + * @return + */ + private Boolean isComplementEnd() { + if(!processInstance.isComplementData()){ + return true; + } + + try { + Map cmdParam = JSONUtils.toMap(processInstance.getCommandParam()); + Date endTime = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); + return processInstance.getScheduleTime().equals(endTime); + } catch (Exception e) { + logger.error("complement end failed : " + e.getMessage(),e); + return false; + } + } + + /** + * updateProcessInstance process instance state + * after each batch of tasks is executed, the status of the process instance is updated + */ 
+ private void updateProcessInstanceState() { + ExecutionStatus state = getProcessInstanceState(); + if(processInstance.getState() != state){ + logger.info( + "work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}", + processInstance.getId(), processInstance.getName(), + processInstance.getState().toString(), state.toString(), + processInstance.getCommandType().toString()); + processInstance.setState(state); + ProcessInstance instance = processDao.findProcessInstanceById(processInstance.getId()); + instance.setState(state); + instance.setProcessDefinition(processInstance.getProcessDefinition()); + processDao.updateProcessInstance(instance); + processInstance = instance; + } + } + + /** + * get task dependency result + * @param taskInstance + * @return + */ + private DependResult getDependResultForTask(TaskInstance taskInstance){ + DependResult inner = isTaskDepsComplete(taskInstance.getName()); + return inner; + } + + /** + * add task to standby list + * @param taskInstance + */ + private void addTaskToStandByList(TaskInstance taskInstance){ + logger.info("add task to stand by list: {}", taskInstance.getName()); + readyToSubmitTaskList.putIfAbsent(taskInstance.getName(), taskInstance); + } + + /** + * remove task from stand by list + * @param taskInstance + */ + private void removeTaskFromStandbyList(TaskInstance taskInstance){ + logger.info("remove task from stand by list: {}", taskInstance.getName()); + readyToSubmitTaskList.remove(taskInstance.getName()); + } + + /** + * has retry task in standby + * @return + */ + private Boolean hasRetryTaskInStandBy(){ + Set taskNameSet = this.readyToSubmitTaskList.keySet(); + for(String taskName : taskNameSet){ + TaskInstance task = this.readyToSubmitTaskList.get(taskName); + if(task.getState().typeIsFailure()){ + return true; + } + } + return false; + } + + /** + * submit and watch the tasks, until the work flow stops + */ + private void runProcess(){ + // submit start node + 
submitPostNode(null); + boolean sendTimeWarning = false; + while(!processInstance.IsProcessInstanceStop()){ + + // send warning email if process time out. + if( !sendTimeWarning && checkProcessTimeOut(processInstance) ){ + alertManager.sendProcessTimeoutAlert(processInstance, + processDao.findProcessDefineById(processInstance.getProcessDefinitionId())); + sendTimeWarning = true; + } + Set keys = activeTaskNode.keySet(); + for (MasterBaseTaskExecThread taskExecThread : keys) { + Future future = activeTaskNode.get(taskExecThread); + TaskInstance task = taskExecThread.getTaskInstance(); + + if(!future.isDone()){ + continue; + } + // node monitor thread complete + activeTaskNode.remove(taskExecThread); + if(task == null){ + this.taskFailedSubmit = true; + continue; + } + logger.info("task :{}, id:{} complete, state is {} ", + task.getName(), task.getId(), task.getState().toString()); + // node success , post node submit + if(task.getState() == ExecutionStatus.SUCCESS){ + completeTaskList.put(task.getName(), task); + submitPostNode(task.getName()); + continue; + } + // node fails, retry first, and then execute the failure process + if(task.getState().typeIsFailure()){ + if(task.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE){ + this.recoverToleranceFaultTaskList.add(task); + } + if(task.taskCanRetry()){ + addTaskToStandByList(task); + }else{ + // node failure, based on failure strategy + errorTaskList.put(task.getName(), task); + completeTaskList.put(task.getName(), task); + if(processInstance.getFailureStrategy() == FailureStrategy.END){ + kill(); + } + } + continue; + } + // other status stop/pause + completeTaskList.put(task.getName(), task); + } + // send alert + if(this.recoverToleranceFaultTaskList.size() > 0){ + alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList); + this.recoverToleranceFaultTaskList.clear(); + } + // updateProcessInstance completed task status + // failure priority is higher than pause + // if a task 
fails, other suspended tasks need to be reset kill + if(errorTaskList.size() > 0){ + for(String taskName : completeTaskList.keySet()){ + TaskInstance completeTask = completeTaskList.get(taskName); + if(completeTask.getState()== ExecutionStatus.PAUSE){ + completeTask.setState(ExecutionStatus.KILL); + completeTaskList.put(taskName, completeTask); + processDao.updateTaskInstance(completeTask); + } + } + } + if(canSubmitTaskToQueue()){ + submitStandByTask(); + } + try { + Thread.sleep(Constants.SLEEP_TIME_MILLIS); + } catch (InterruptedException e) { + logger.error(e.getMessage(),e); + } + updateProcessInstanceState(); + } + + logger.info("process:{} end, state :{}", processInstance.getId(), processInstance.getState()); + } + + /** + * check process time out + * @param processInstance + * @return + */ + private boolean checkProcessTimeOut(ProcessInstance processInstance) { + if(processInstance.getTimeout() == 0 ){ + return false; + } + + Date now = new Date(); + long runningTime = DateUtils.diffMin(now, processInstance.getStartTime()); + + if(runningTime > processInstance.getTimeout()){ + return true; + } + return false; + } + + private boolean canSubmitTaskToQueue() { + return OSUtils.checkResource(conf, true); + } + + + /** + * close the ongoing tasks + */ + private void kill() { + + logger.info("kill called on process instance id: {}, num: {}", processInstance.getId(), + activeTaskNode.size()); + for (Map.Entry> entry : activeTaskNode.entrySet()) { + + MasterBaseTaskExecThread taskExecThread = entry.getKey(); + Future future = entry.getValue(); + + if (!future.isDone()) { + // record kill info + logger.info("kill process instance, id: {}, task: {}", processInstance.getId(), taskExecThread.getTaskInstance().getId()); + + // kill node + taskExecThread.kill(); + } + } + } + + /** + * whether the retry interval is timed out + * @param taskInstance + * @return + */ + private Boolean retryTaskIntervalOverTime(TaskInstance taskInstance){ + if(taskInstance.getState() != 
ExecutionStatus.FAILURE){ + return Boolean.TRUE; + } + if(taskInstance.getId() == 0 || + taskInstance.getMaxRetryTimes() ==0 || + taskInstance.getRetryInterval() == 0 ){ + return Boolean.TRUE; + } + Date now = new Date(); + long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime()); + // task retry does not over time, return false + if(taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT >= failedTimeInterval){ + return Boolean.FALSE; + } + return Boolean.TRUE; + } + + /** + * handling the list of tasks to be submitted + */ + private void submitStandByTask(){ + Set readySubmitTaskNames = readyToSubmitTaskList.keySet(); + for(String readySubmitTaskName : readySubmitTaskNames){ + TaskInstance task = readyToSubmitTaskList.get(readySubmitTaskName); + DependResult dependResult = getDependResultForTask(task); + if(DependResult.SUCCESS == dependResult){ + if(retryTaskIntervalOverTime(task)){ + submitTaskExec(task); + removeTaskFromStandbyList(task); + } + }else if(DependResult.FAILED == dependResult){ + // if the dependency fails, the current node is not submitted and the state changes to failure. 
+ dependFailedTask.put(readySubmitTaskName, task); + removeTaskFromStandbyList(task); + logger.info("task {},id:{} depend result : {}",task.getName(), task.getId(), dependResult); + } + } + } + + private TaskInstance getRecoveryTaskInstance(String taskId){ + if(!StringUtils.isNotEmpty(taskId)){ + return null; + } + try { + Integer intId = Integer.valueOf(taskId); + TaskInstance task = processDao.findTaskInstanceById(intId); + if(task == null){ + logger.error("start node id cannot be found: {}", taskId); + }else { + return task; + } + }catch (Exception e){ + logger.error("get recovery task instance failed : " + e.getMessage(),e); + } + return null; + } + + /** + * get start task instance list + * @param cmdParam + * @return + */ + private List getStartTaskInstanceList( String cmdParam){ + + List instanceList = new ArrayList<>(); + Map paramMap = JSONUtils.toMap(cmdParam); + + if(paramMap != null && paramMap.containsKey(CMDPARAM_RECOVERY_START_NODE_STRING)){ + String[] idList = paramMap.get(CMDPARAM_RECOVERY_START_NODE_STRING).split(Constants.COMMA); + for(String nodeId : idList){ + TaskInstance task = getRecoveryTaskInstance(nodeId); + if(task != null){ + instanceList.add(task); + } + } + } + return instanceList; + } + + /** + * parse "StartNodeNameList" from cmd param + * @param cmdParam + * @return + */ + private List parseStartNodeName(String cmdParam){ + List startNodeNameList = new ArrayList<>(); + Map paramMap = JSONUtils.toMap(cmdParam); + if(paramMap == null){ + return startNodeNameList; + } + if(paramMap.containsKey(CMDPARAM_START_NODE_NAMES)){ + startNodeNameList = Arrays.asList(paramMap.get(CMDPARAM_START_NODE_NAMES).split(Constants.COMMA)); + } + return startNodeNameList; + } + + /** + * generate start node name list from parsing command param; + * if "StartNodeIdList" exists in command param, return StartNodeIdList + * @return + */ + private List getRecoveryNodeNameList(){ + List recoveryNodeNameList = new ArrayList<>(); + if(recoverNodeIdList.size() > 
0) { + for (TaskInstance task : recoverNodeIdList) { + recoveryNodeNameList.add(task.getName()); + } + } + return recoveryNodeNameList; + } + + /** + * generate flow dag + * @param processDefinitionJson + * @return + * @throws Exception + */ + public ProcessDag generateFlowDag(String processDefinitionJson, + List startNodeNameList, + List recoveryNodeNameList, + TaskDependType depNodeType)throws Exception{ + return DagHelper.generateFlowDag(processDefinitionJson, startNodeNameList, recoveryNodeNameList, depNodeType); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java new file mode 100644 index 0000000000..cbc215fd62 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.master.runner; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.common.zk.AbstractZKClient; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.server.zk.ZKMasterClient; +import org.apache.commons.configuration.Configuration; +import org.apache.curator.framework.imps.CuratorFrameworkState; +import org.apache.curator.framework.recipes.locks.InterProcessMutex; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ThreadPoolExecutor; + +/** + * master scheduler thread + */ +public class MasterSchedulerThread implements Runnable { + + private static final Logger logger = LoggerFactory.getLogger(MasterSchedulerThread.class); + + private final ExecutorService masterExecService; + + /** + * escheduler database interface + */ + private final ProcessDao processDao; + + private final ZKMasterClient zkMasterClient ; + + private int masterExecThreadNum; + + private final Configuration conf; + + + public MasterSchedulerThread(ZKMasterClient zkClient, ProcessDao processDao, Configuration conf, int masterExecThreadNum){ + this.processDao = processDao; + this.zkMasterClient = zkClient; + this.conf = conf; + this.masterExecThreadNum = masterExecThreadNum; + this.masterExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Exec-Thread",masterExecThreadNum); + } + + + @Override + public void run() { + while (Stopper.isRunning()){ + + // process instance + ProcessInstance processInstance = null; + + InterProcessMutex mutex = null; + try { + + if(OSUtils.checkResource(conf, true)){ + if (zkMasterClient.getZkClient().getState() == 
CuratorFrameworkState.STARTED) { + + // create distributed lock with the root node path of the lock space as /escheduler/lock/failover/master + String znodeLock = zkMasterClient.getMasterLockPath(); + + mutex = new InterProcessMutex(zkMasterClient.getZkClient(), znodeLock); + mutex.acquire(); + + ThreadPoolExecutor poolExecutor = (ThreadPoolExecutor) masterExecService; + int activeCount = poolExecutor.getActiveCount(); + // make sure to scan and delete command table in one transaction + processInstance = processDao.scanCommand(logger, OSUtils.getHost(), this.masterExecThreadNum - activeCount); + if (processInstance != null) { + logger.info("start master exex thread , split DAG ..."); + masterExecService.execute(new MasterExecThread(processInstance)); + } + } + } + + // accessing the command table every SLEEP_TIME_MILLIS milliseconds + Thread.sleep(Constants.SLEEP_TIME_MILLIS); + + }catch (Exception e){ + logger.error("master scheduler thread exception : " + e.getMessage(),e); + }finally{ + AbstractZKClient.releaseMutex(mutex); + } + } + } + + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java new file mode 100644 index 0000000000..cb794ab7dc --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java @@ -0,0 +1,165 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.runner; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import com.alibaba.fastjson.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Date; + +import static org.apache.dolphinscheduler.common.Constants.SCHEDULER_TASKS_KILL; + +/** + * master task exec thread + */ +public class MasterTaskExecThread extends MasterBaseTaskExecThread { + + private static final Logger logger = LoggerFactory.getLogger(MasterTaskExecThread.class); + + + public MasterTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){ + super(taskInstance, processInstance); + } + + /** + * get task instance + * @return + */ + @Override + public TaskInstance getTaskInstance(){ + return this.taskInstance; + } + + private Boolean alreadyKilled = false; + + @Override + public Boolean submitWaitComplete() { + Boolean result = false; + this.taskInstance = submit(); + if(!this.taskInstance.getState().typeIsFinished()) { + result = waitTaskQuit(); + } + 
taskInstance.setEndTime(new Date()); + processDao.updateTaskInstance(taskInstance); + logger.info("task :{} id:{}, process id:{}, exec thread completed ", + this.taskInstance.getName(),taskInstance.getId(), processInstance.getId() ); + return result; + } + + + public Boolean waitTaskQuit(){ + // query new state + taskInstance = processDao.findTaskInstanceById(taskInstance.getId()); + Boolean result = true; + // task time out + Boolean checkTimeout = false; + TaskTimeoutParameter taskTimeoutParameter = getTaskTimeoutParameter(); + if(taskTimeoutParameter.getEnable()){ + TaskTimeoutStrategy strategy = taskTimeoutParameter.getStrategy(); + if(strategy == TaskTimeoutStrategy.WARN || strategy == TaskTimeoutStrategy.WARNFAILED){ + checkTimeout = true; + } + } + + while (Stopper.isRunning()){ + try { + if(this.processInstance == null){ + logger.error("process instance not exists , master task exec thread exit"); + return result; + } + // task instance add queue , waiting worker to kill + if(this.cancel || this.processInstance.getState() == ExecutionStatus.READY_STOP){ + cancelTaskInstance(); + } + // task instance finished + if (taskInstance.getState().typeIsFinished()){ + break; + } + if(checkTimeout){ + long remainTime = getRemaintime(taskTimeoutParameter.getInterval()*60); + if (remainTime < 0) { + logger.warn("task id: {} execution time out",taskInstance.getId()); + // process define + ProcessDefinition processDefine = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); + // send warn mail + alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(),processDefine.getReceivers(),processDefine.getReceiversCc(),taskInstance.getId(),taskInstance.getName()); + checkTimeout = false; + } + } + // updateProcessInstance task instance + taskInstance = processDao.findTaskInstanceById(taskInstance.getId()); + processInstance = processDao.findProcessInstanceById(processInstance.getId()); + Thread.sleep(Constants.SLEEP_TIME_MILLIS); + } catch 
(Exception e) { + logger.error("exception: "+ e.getMessage(),e); + logger.error("wait task quit failed, instance id:{}, task id:{}", + processInstance.getId(), taskInstance.getId()); + } + } + return result; + } + + + /** + * task instance add queue , waiting worker to kill + */ + private void cancelTaskInstance(){ + if(alreadyKilled || taskInstance.getHost() == null){ + return ; + } + alreadyKilled = true; + String queueValue = String.format("%s-%d", + taskInstance.getHost(), taskInstance.getId()); + taskQueue.sadd(SCHEDULER_TASKS_KILL, queueValue); + + logger.info("master add kill task :{} id:{} to kill queue", + taskInstance.getName(), taskInstance.getId() ); + } + + /** + * get task timeout parameter + * @return + */ + private TaskTimeoutParameter getTaskTimeoutParameter(){ + String taskJson = taskInstance.getTaskJson(); + TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); + return taskNode.getTaskTimeoutParameter(); + } + + + /** + * get remain time(s) + * + * @return + */ + private long getRemaintime(long timeoutSeconds) { + Date startTime = taskInstance.getStartTime(); + long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000; + long remainTime = timeoutSeconds - usedTime; + return remainTime; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java new file mode 100644 index 0000000000..7f0737a463 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java @@ -0,0 +1,178 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.runner; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Date; + +/** + * subflow task exec thread + */ +public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { + + + private static final Logger logger = LoggerFactory.getLogger(SubProcessTaskExecThread.class); + + + private ProcessInstance subProcessInstance; + + public SubProcessTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){ + super(taskInstance, processInstance); + } + + @Override + public Boolean submitWaitComplete() { + + Boolean result = false; + try{ + // submit task instance + this.taskInstance = submit(); + + if(taskInstance == null){ + logger.error("sub work flow submit task instance to mysql and queue failed , please check and fix it"); + return result; + } + setTaskInstanceState(); + waitTaskQuit(); + subProcessInstance = processDao.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); + + // at the end of the subflow , the task state is changed to the subflow state + 
if(subProcessInstance != null){ + if(subProcessInstance.getState() == ExecutionStatus.STOP){ + this.taskInstance.setState(ExecutionStatus.KILL); + }else{ + this.taskInstance.setState(subProcessInstance.getState()); + result = true; + } + } + taskInstance.setEndTime(new Date()); + processDao.updateTaskInstance(taskInstance); + logger.info("subflow task :{} id:{}, process id:{}, exec thread completed ", + this.taskInstance.getName(),taskInstance.getId(), processInstance.getId() ); + result = true; + + }catch (Exception e){ + logger.error("exception: "+ e.getMessage(),e); + logger.error("wait task quit failed, instance id:{}, task id:{}", + processInstance.getId(), taskInstance.getId()); + } + return result; + } + + + /** + * set task instance state + * @return + */ + private Boolean setTaskInstanceState(){ + subProcessInstance = processDao.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); + if(subProcessInstance == null || taskInstance.getState().typeIsFinished()){ + return false; + } + + taskInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + taskInstance.setStartTime(new Date()); + processDao.updateTaskInstance(taskInstance); + return true; + } + + /** + * updateProcessInstance parent state + */ + private void updateParentProcessState(){ + ProcessInstance parentProcessInstance = processDao.findProcessInstanceById(this.processInstance.getId()); + + if(parentProcessInstance == null){ + logger.error("parent work flow instance is null , please check it! work flow id {}", processInstance.getId()); + return; + } + this.processInstance.setState(parentProcessInstance.getState()); + } + + /** + * wait task quit + * @throws InterruptedException + */ + private void waitTaskQuit() throws InterruptedException { + + logger.info("wait sub work flow: {} complete", this.taskInstance.getName()); + + if (taskInstance.getState().typeIsFinished()) { + logger.info("sub work flow task {} already complete. 
task state:{}, parent work flow instance state:{}", + this.taskInstance.getName(), + this.taskInstance.getState().toString(), + this.processInstance.getState().toString()); + return; + } + while (Stopper.isRunning()) { + // waiting for subflow process instance establishment + if (subProcessInstance == null) { + + Thread.sleep(Constants.SLEEP_TIME_MILLIS); + + if(!setTaskInstanceState()){ + continue; + } + } + subProcessInstance = processDao.findProcessInstanceById(subProcessInstance.getId()); + updateParentProcessState(); + if (subProcessInstance.getState().typeIsFinished()){ + break; + } + + if(this.processInstance.getState() == ExecutionStatus.READY_PAUSE){ + // parent process "ready to pause" , child process "pause" + pauseSubProcess(); + }else if(this.cancel || this.processInstance.getState() == ExecutionStatus.READY_STOP){ + // parent Process "Ready to Cancel" , subflow "Cancel" + stopSubProcess(); + } + Thread.sleep(Constants.SLEEP_TIME_MILLIS); + } + } + + /** + * stop subflow + */ + private void stopSubProcess() { + if(subProcessInstance.getState() == ExecutionStatus.STOP || + subProcessInstance.getState() == ExecutionStatus.READY_STOP){ + return; + } + subProcessInstance.setState(ExecutionStatus.READY_STOP); + processDao.updateProcessInstance(subProcessInstance); + } + + /** + * pause subflow + */ + private void pauseSubProcess() { + if(subProcessInstance.getState() == ExecutionStatus.PAUSE || + subProcessInstance.getState() == ExecutionStatus.READY_PAUSE){ + return; + } + subProcessInstance.setState(ExecutionStatus.READY_PAUSE); + processDao.updateProcessInstance(subProcessInstance); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/DruidConnectionProvider.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/DruidConnectionProvider.java new file mode 100644 index 0000000000..c328596761 --- /dev/null +++ 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/DruidConnectionProvider.java @@ -0,0 +1,203 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.quartz; + +import com.alibaba.druid.pool.DruidDataSource; +import org.quartz.SchedulerException; +import java.sql.Connection; +import java.sql.SQLException; +import org.quartz.utils.ConnectionProvider; + +/** + * druid connection provider + */ +public class DruidConnectionProvider implements ConnectionProvider { + + /** + * JDBC driver + */ + public String driver; + + /** + * JDBC URL + */ + public String URL; + + /** + * Database user name + */ + public String user; + + /** + * Database password + */ + public String password; + + /** + * Maximum number of database connections + */ + public int maxConnections; + + /** + * The query that validates the database connection + */ + public String validationQuery; + + /** + * Whether the database sql query to validate connections should be executed every time + * a connection is retrieved from the pool to ensure that it is still valid. If false, + * then validation will occur on check-in. Default is false. 
+ */ + private boolean validateOnCheckout; + + /** + * The number of seconds between tests of idle connections - only enabled + * if the validation query property is set. Default is 50 seconds. + */ + private int idleConnectionValidationSeconds; + + /** + * The maximum number of prepared statements that will be cached per connection in the pool. + * Depending upon your JDBC Driver this may significantly help performance, or may slightly + * hinder performance. + * Default is 120, as Quartz uses over 100 unique statements. 0 disables the feature. + */ + public String maxCachedStatementsPerConnection; + + /** + * Discard connections after they have been idle this many seconds. 0 disables the feature. Default is 0. + */ + private String discardIdleConnectionsSeconds; + + /** + * Default maximum number of database connections in the pool. + */ + public static final int DEFAULT_DB_MAX_CONNECTIONS = 10; + + /** + * The maximum number of prepared statements that will be cached per connection in the pool. 
+ */ + public static final int DEFAULT_DB_MAX_CACHED_STATEMENTS_PER_CONNECTION = 120; + + /** + * Druid connection pool + */ + private DruidDataSource datasource; + + public Connection getConnection() throws SQLException { + return datasource.getConnection(); + } + public void shutdown() throws SQLException { + datasource.close(); + } + public void initialize() throws SQLException{ + if (this.URL == null) { + throw new SQLException("DBPool could not be created: DB URL cannot be null"); + } + if (this.driver == null) { + throw new SQLException("DBPool driver could not be created: DB driver class name cannot be null!"); + } + if (this.maxConnections < 0) { + throw new SQLException("DBPool maxConnectins could not be created: Max connections must be greater than zero!"); + } + datasource = new DruidDataSource(); + try{ + datasource.setDriverClassName(this.driver); + } catch (Exception e) { + try { + throw new SchedulerException("Problem setting driver class name on datasource: " + e.getMessage(), e); + } catch (SchedulerException e1) { + } + } + datasource.setUrl(this.URL); + datasource.setUsername(this.user); + datasource.setPassword(this.password); + datasource.setMaxActive(this.maxConnections); + datasource.setMinIdle(1); + datasource.setMaxWait(0); + datasource.setMaxPoolPreparedStatementPerConnectionSize(DEFAULT_DB_MAX_CONNECTIONS); + if (this.validationQuery != null) { + datasource.setValidationQuery(this.validationQuery); + if(!this.validateOnCheckout) + datasource.setTestOnReturn(true); + else + datasource.setTestOnBorrow(true); + datasource.setValidationQueryTimeout(this.idleConnectionValidationSeconds); + } + } + + public String getDriver() { + return driver; + } + public void setDriver(String driver) { + this.driver = driver; + } + public String getURL() { + return URL; + } + public void setURL(String URL) { + this.URL = URL; + } + public String getUser() { + return user; + } + public void setUser(String user) { + this.user = user; + } + public String 
getPassword() { + return password; + } + public void setPassword(String password) { + this.password = password; + } + public int getMaxConnections() { + return maxConnections; + } + public void setMaxConnections(int maxConnections) { + this.maxConnections = maxConnections; + } + public String getValidationQuery() { + return validationQuery; + } + public void setValidationQuery(String validationQuery) { + this.validationQuery = validationQuery; + } + public boolean isValidateOnCheckout() { + return validateOnCheckout; + } + public void setValidateOnCheckout(boolean validateOnCheckout) { + this.validateOnCheckout = validateOnCheckout; + } + public int getIdleConnectionValidationSeconds() { + return idleConnectionValidationSeconds; + } + public void setIdleConnectionValidationSeconds(int idleConnectionValidationSeconds) { + this.idleConnectionValidationSeconds = idleConnectionValidationSeconds; + } + public DruidDataSource getDatasource() { + return datasource; + } + public void setDatasource(DruidDataSource datasource) { + this.datasource = datasource; + } + public String getDiscardIdleConnectionsSeconds() { + return discardIdleConnectionsSeconds; + } + public void setDiscardIdleConnectionsSeconds(String discardIdleConnectionsSeconds) { + this.discardIdleConnectionsSeconds = discardIdleConnectionsSeconds; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/ProcessScheduleJob.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/ProcessScheduleJob.java new file mode 100644 index 0000000000..828a54d7b8 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/ProcessScheduleJob.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.quartz; + + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.quartz.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.util.Assert; + +import java.util.Date; + +/** + * process schedule job + *

+ * {@link Job} + *

+ */ +public class ProcessScheduleJob implements Job { + + private static final Logger logger = LoggerFactory.getLogger(ProcessScheduleJob.class); + + /** + * {@link ProcessDao} + */ + private static ProcessDao processDao; + + + /** + * init + */ + public static void init(ProcessDao processDao) { + ProcessScheduleJob.processDao = processDao; + } + + /** + *

+ * Called by the {@link Scheduler} when a {@link Trigger} + * fires that is associated with the Job. + *

+ * + *

+ * The implementation may wish to set a + * {@link JobExecutionContext#setResult(Object) result} object on the + * {@link JobExecutionContext} before this method exits. The result itself + * is meaningless to Quartz, but may be informative to + * {@link JobListener}s or + * {@link TriggerListener}s that are watching the job's + * execution. + *

+ * + * @throws JobExecutionException if there is an exception while executing the job. + */ + @Override + public void execute(JobExecutionContext context) throws JobExecutionException { + + //TODO... + Assert.notNull(processDao, "please call init() method first"); + + JobDataMap dataMap = context.getJobDetail().getJobDataMap(); + + int projectId = dataMap.getInt(Constants.PROJECT_ID); + int scheduleId = dataMap.getInt(Constants.SCHEDULE_ID); + + + Date scheduledFireTime = context.getScheduledFireTime(); + + + Date fireTime = context.getFireTime(); + + logger.info("scheduled fire time :{}, fire time :{}, process id :{}", scheduledFireTime, fireTime, scheduleId); + + // query schedule + Schedule schedule = processDao.querySchedule(scheduleId); + if (schedule == null) { + logger.warn("process schedule does not exist in db,delete schedule job in quartz, projectId:{}, scheduleId:{}", projectId, scheduleId); + deleteJob(projectId, scheduleId); + return; + } + + + ProcessDefinition processDefinition = processDao.findProcessDefineById(schedule.getProcessDefinitionId()); + // release state : online/offline + ReleaseState releaseState = processDefinition.getReleaseState(); + if (processDefinition == null || releaseState == ReleaseState.OFFLINE) { + logger.warn("process definition does not exist in db or offline,need not to create command, projectId:{}, processId:{}", projectId, scheduleId); + return; + } + + Command command = new Command(); + command.setCommandType(CommandType.SCHEDULER); + command.setExecutorId(schedule.getUserId()); + command.setFailureStrategy(schedule.getFailureStrategy()); + command.setProcessDefinitionId(schedule.getProcessDefinitionId()); + command.setScheduleTime(scheduledFireTime); + command.setStartTime(fireTime); + command.setWarningGroupId(schedule.getWarningGroupId()); + command.setWorkerGroupId(schedule.getWorkerGroupId()); + command.setWarningType(schedule.getWarningType()); + 
command.setProcessInstancePriority(schedule.getProcessInstancePriority()); + + processDao.createCommand(command); + } + + + /** + * delete job + */ + private void deleteJob(int projectId, int scheduleId) { + String jobName = QuartzExecutors.buildJobName(scheduleId); + String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); + QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/QuartzExecutors.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/QuartzExecutors.java new file mode 100644 index 0000000000..8e22ead291 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/quartz/QuartzExecutors.java @@ -0,0 +1,311 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.quartz; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.commons.lang.StringUtils; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.quartz.impl.matchers.GroupMatcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.Calendar; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +import static org.quartz.CronScheduleBuilder.cronSchedule; +import static org.quartz.JobBuilder.newJob; +import static org.quartz.TriggerBuilder.newTrigger; + +/** + * single Quartz executors instance + */ +public class QuartzExecutors { + + private static final Logger logger = LoggerFactory.getLogger(QuartzExecutors.class); + + private final ReadWriteLock lock = new ReentrantReadWriteLock(); + + + /** + *

+ * A Scheduler maintains a registry of {@link org.quartz.JobDetail}s + * and {@link Trigger}s. Once registered, the Scheduler + * is responsible for executing Job s when their associated + * Trigger s fire (when their scheduled time arrives). + *

+ * {@link Scheduler} + */ + private static Scheduler scheduler; + + private static volatile QuartzExecutors INSTANCE = null; + + private QuartzExecutors() {} + + /** + * thread safe and performance promote + * @return + */ + public static QuartzExecutors getInstance() { + if (INSTANCE == null) { + synchronized (QuartzExecutors.class) { + // when more than two threads run into the first null check same time, to avoid instanced more than one time, it needs to be checked again. + if (INSTANCE == null) { + INSTANCE = new QuartzExecutors(); + //finish QuartzExecutors init + INSTANCE.init(); + } + } + } + return INSTANCE; + } + + + /** + * init + * + *

+ * Returns a client-usable handle to a Scheduler. + *

+ */ + private void init() { + try { + SchedulerFactory schedulerFactory = new StdSchedulerFactory(Constants.QUARTZ_PROPERTIES_PATH); + scheduler = schedulerFactory.getScheduler(); + + } catch (SchedulerException e) { + logger.error(e.getMessage(),e); + System.exit(1); + } + + } + + /** + * Whether the scheduler has been started. + * + *

+ * Note: This only reflects whether {@link #start()} has ever + * been called on this Scheduler, so it will return true even + * if the Scheduler is currently in standby mode or has been + * since shutdown. + *

+ * + * @see Scheduler#start() + */ + public void start() throws SchedulerException { + if (!scheduler.isStarted()){ + scheduler.start(); + logger.info("Quartz service started" ); + } + } + + /** + * stop all scheduled tasks + * + * Halts the Scheduler's firing of {@link Trigger}s, + * and cleans up all resources associated with the Scheduler. Equivalent to + * shutdown(false). + * + *

+ * The scheduler cannot be re-started. + *

+ * + */ + public void shutdown() throws SchedulerException { + if (!scheduler.isShutdown()) { + // don't wait for the task to complete + scheduler.shutdown(); + logger.info("Quartz service stopped, and halt all tasks"); + } + } + + + /** + * add task trigger , if this task already exists, return this task with updated trigger + * + * @param clazz job class name + * @param jobName job name + * @param jobGroupName job group name + * @param startDate job start date + * @param endDate job end date + * @param cronExpression cron expression + * @param jobDataMap job parameters data map + * @return + */ + public void addJob(Class clazz,String jobName,String jobGroupName,Date startDate, Date endDate, + String cronExpression, + Map jobDataMap) { + lock.writeLock().lock(); + try { + + JobKey jobKey = new JobKey(jobName, jobGroupName); + JobDetail jobDetail; + //add a task (if this task already exists, return this task directly) + if (scheduler.checkExists(jobKey)) { + + jobDetail = scheduler.getJobDetail(jobKey); + if (jobDataMap != null) { + jobDetail.getJobDataMap().putAll(jobDataMap); + } + } else { + jobDetail = newJob(clazz).withIdentity(jobKey).build(); + + if (jobDataMap != null) { + jobDetail.getJobDataMap().putAll(jobDataMap); + } + + scheduler.addJob(jobDetail, false, true); + + logger.info("Add job, job name: {}, group name: {}", + jobName, jobGroupName); + } + + TriggerKey triggerKey = new TriggerKey(jobName, jobGroupName); + /** + * Instructs the {@link Scheduler} that upon a mis-fire + * situation, the {@link CronTrigger} wants to have it's + * next-fire-time updated to the next time in the schedule after the + * current time (taking into account any associated {@link Calendar}, + * but it does not want to be fired now. 
+ */ + CronTrigger cronTrigger = newTrigger().withIdentity(triggerKey).startAt(startDate).endAt(endDate) + .withSchedule(cronSchedule(cronExpression).withMisfireHandlingInstructionDoNothing()) + .forJob(jobDetail).build(); + + if (scheduler.checkExists(triggerKey)) { + // updateProcessInstance scheduler trigger when scheduler cycle changes + CronTrigger oldCronTrigger = (CronTrigger) scheduler.getTrigger(triggerKey); + String oldCronExpression = oldCronTrigger.getCronExpression(); + + if (!StringUtils.equalsIgnoreCase(cronExpression,oldCronExpression)) { + // reschedule job trigger + scheduler.rescheduleJob(triggerKey, cronTrigger); + logger.info("reschedule job trigger, triggerName: {}, triggerGroupName: {}, cronExpression: {}, startDate: {}, endDate: {}", + jobName, jobGroupName, cronExpression, startDate, endDate); + } + } else { + scheduler.scheduleJob(cronTrigger); + logger.info("schedule job trigger, triggerName: {}, triggerGroupName: {}, cronExpression: {}, startDate: {}, endDate: {}", + jobName, jobGroupName, cronExpression, startDate, endDate); + } + + } catch (Exception e) { + logger.error("add job failed", e); + throw new RuntimeException("add job failed:"+e.getMessage()); + } finally { + lock.writeLock().unlock(); + } + } + + + /** + * delete job + * + * @param jobName + * @param jobGroupName + * @return true if the Job was found and deleted. + */ + public boolean deleteJob(String jobName, String jobGroupName) { + lock.writeLock().lock(); + try { + JobKey jobKey = new JobKey(jobName,jobGroupName); + if(scheduler.checkExists(jobKey)){ + logger.info("try to delete job, job name: {}, job group name: {},", jobName, jobGroupName); + return scheduler.deleteJob(jobKey); + }else { + return true; + } + + } catch (SchedulerException e) { + logger.error(String.format("delete job : %s failed",jobName), e); + } finally { + lock.writeLock().unlock(); + } + return false; + } + + /** + * delete all jobs in job group + *

+ * Note that while this bulk operation is likely more efficient than + * invoking deleteJob(JobKey jobKey) several + * times, it may have the adverse affect of holding data locks for a + * single long duration of time (rather than lots of small durations + * of time). + *

+ * + * @param jobGroupName + * + * @return true if all of the Jobs were found and deleted, false if + * one or more were not deleted. + */ + public boolean deleteAllJobs(String jobGroupName) { + lock.writeLock().lock(); + try { + logger.info("try to delete all jobs in job group: {}", jobGroupName); + List jobKeys = new ArrayList<>(); + jobKeys.addAll(scheduler.getJobKeys(GroupMatcher.groupEndsWith(jobGroupName))); + + return scheduler.deleteJobs(jobKeys); + } catch (SchedulerException e) { + logger.error(String.format("delete all jobs in job group: %s failed",jobGroupName), e); + } finally { + lock.writeLock().unlock(); + } + return false; + } + + /** + * build job name + */ + public static String buildJobName(int processId) { + StringBuilder sb = new StringBuilder(30); + sb.append(Constants.QUARTZ_JOB_PRIFIX).append(Constants.UNDERLINE).append(processId); + return sb.toString(); + } + + /** + * build job group name + */ + public static String buildJobGroupName(int projectId) { + StringBuilder sb = new StringBuilder(30); + sb.append(Constants.QUARTZ_JOB_GROUP_PRIFIX).append(Constants.UNDERLINE).append(projectId); + return sb.toString(); + } + + /** + * add params to map + * + * @param projectId + * @param scheduleId + * @param schedule + * @return + */ + public static Map buildDataMap(int projectId, int scheduleId, Schedule schedule) { + Map dataMap = new HashMap<>(3); + dataMap.put(Constants.PROJECT_ID, projectId); + dataMap.put(Constants.SCHEDULE_ID, scheduleId); + dataMap.put(Constants.SCHEDULE, JSONUtils.toJson(schedule)); + + return dataMap; + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LogClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LogClient.java new file mode 100644 index 0000000000..6bb1a14a7c --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LogClient.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.rpc; + +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import io.grpc.StatusRuntimeException; +import org.apache.dolphinscheduler.rpc.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.TimeUnit; + +/** + * log client + */ +public class LogClient { + + private static final Logger logger = LoggerFactory.getLogger(LogClient.class); + + private final ManagedChannel channel; + private final LogViewServiceGrpc.LogViewServiceBlockingStub blockingStub; + + /** Construct client connecting to HelloWorld server at {@code host:port}. */ + public LogClient(String host, int port) { + this(ManagedChannelBuilder.forAddress(host, port) + // Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid + // needing certificates. + .usePlaintext(true)); + } + + /** Construct client for accessing RouteGuide server using the existing channel. 
*/ + LogClient(ManagedChannelBuilder channelBuilder) { + /** + * set max message read size + */ + channelBuilder.maxInboundMessageSize(Integer.MAX_VALUE); + channel = channelBuilder.build(); + blockingStub = LogViewServiceGrpc.newBlockingStub(channel); + } + + public void shutdown() throws InterruptedException { + channel.shutdown().awaitTermination(5, TimeUnit.SECONDS); + } + + /** + * roll view log + * @param path + * @param skipLineNum + * @param limit + * @return + */ + public String rollViewLog(String path,int skipLineNum,int limit) { + logger.info("roll view log , path : {},skipLineNum : {} ,limit :{}", path, skipLineNum, limit); + LogParameter pathParameter = LogParameter + .newBuilder() + .setPath(path) + .setSkipLineNum(skipLineNum) + .setLimit(limit) + .build(); + RetStrInfo retStrInfo; + try { + retStrInfo = blockingStub.rollViewLog(pathParameter); + return retStrInfo.getMsg(); + } catch (StatusRuntimeException e) { + logger.error("roll view log failed : " + e.getMessage(), e); + return null; + } + } + + /** + * view all log + * @param path + * @return + */ + public String viewLog(String path) { + logger.info("view log path : {}",path); + + PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); + RetStrInfo retStrInfo; + try { + retStrInfo = blockingStub.viewLog(pathParameter); + return retStrInfo.getMsg(); + } catch (StatusRuntimeException e) { + logger.error("view log failed : " + e.getMessage(), e); + return null; + } + } + + /** + * get log bytes + * @param path + * @return + */ + public byte[] getLogBytes(String path) { + logger.info("get log bytes {}",path); + + PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); + RetByteInfo retByteInfo; + try { + retByteInfo = blockingStub.getLogBytes(pathParameter); + return retByteInfo.getData().toByteArray(); + } catch (StatusRuntimeException e) { + logger.error("get log bytes failed : " + e.getMessage(), e); + return null; + } + } +} \ No newline at end of 
// ==== file (new in patch): dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LoggerServer.java ====
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.server.rpc;

import io.grpc.stub.StreamObserver;
import org.apache.dolphinscheduler.common.Constants;
import com.google.protobuf.ByteString;
import io.grpc.Server;
import io.grpc.ServerBuilder;
import org.apache.dolphinscheduler.rpc.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Logger server: a standalone gRPC server that lets remote callers read task
 * log files on this host (paged view, full view, and raw bytes).
 */
public class LoggerServer {

    private static final Logger logger = LoggerFactory.getLogger(LoggerServer.class);

    /**
     * the running gRPC server instance (null until {@link #start()} is called)
     */
    private Server server;

    /**
     * Start the gRPC server on {@link Constants#RPC_PORT} and register a JVM
     * shutdown hook that stops it cleanly.
     *
     * @throws IOException if the port cannot be bound
     */
    public void start() throws IOException {
        /* The port on which the server should run */
        int port = Constants.RPC_PORT;
        server = ServerBuilder.forPort(port)
                .addService(new LogViewServiceGrpcImpl())
                .build()
                .start();
        logger.info("server started, listening on port : {}", port);
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                logger.info("shutting down gRPC server since JVM is shutting down");
                LoggerServer.this.stop();
                logger.info("server shut down");
            }
        });
    }

    /**
     * Request a graceful shutdown of the server, if it was started.
     */
    private void stop() {
        if (server != null) {
            server.shutdown();
        }
    }

    /**
     * Await termination on the main thread since the gRPC library uses daemon threads.
     *
     * @throws InterruptedException if the waiting thread is interrupted
     */
    private void blockUntilShutdown() throws InterruptedException {
        if (server != null) {
            server.awaitTermination();
        }
    }

    /**
     * Launches the server from the command line.
     */
    public static void main(String[] args) throws IOException, InterruptedException {
        final LoggerServer server = new LoggerServer();
        server.start();
        server.blockUntilShutdown();
    }

    /**
     * gRPC service implementation backing the log-view RPCs.
     */
    static class LogViewServiceGrpcImpl extends LogViewServiceGrpc.LogViewServiceImplBase {

        /**
         * Return a page of the log file: skip {@code skipLineNum} lines, then
         * read up to {@code limit} lines, joined with CRLF.
         */
        @Override
        public void rollViewLog(LogParameter request, StreamObserver<RetStrInfo> responseObserver) {
            logger.info("log parameter path : {} ,skip line : {}, limit : {}",
                    request.getPath(),
                    request.getSkipLineNum(),
                    request.getLimit());
            List<String> lines = readFile(request.getPath(), request.getSkipLineNum(), request.getLimit());
            StringBuilder sb = new StringBuilder();
            for (String line : lines) {
                sb.append(line).append("\r\n");
            }
            RetStrInfo retInfoBuild = RetStrInfo.newBuilder().setMsg(sb.toString()).build();
            responseObserver.onNext(retInfoBuild);
            responseObserver.onCompleted();
        }

        /**
         * Return the whole log file as a single CRLF-joined string.
         */
        @Override
        public void viewLog(PathParameter request, StreamObserver<RetStrInfo> responseObserver) {
            logger.info("task path is : {} ", request.getPath());
            RetStrInfo retInfoBuild = RetStrInfo.newBuilder().setMsg(readFile(request.getPath())).build();
            responseObserver.onNext(retInfoBuild);
            responseObserver.onCompleted();
        }

        /**
         * Return the raw bytes of the log file (used for downloads).
         */
        @Override
        public void getLogBytes(PathParameter request, StreamObserver<RetByteInfo> responseObserver) {
            try {
                ByteString bytes = ByteString.copyFrom(getFileBytes(request.getPath()));
                RetByteInfo.Builder builder = RetByteInfo.newBuilder();
                builder.setData(bytes);
                responseObserver.onNext(builder.build());
                responseObserver.onCompleted();
            } catch (Exception e) {
                logger.error("get log bytes failed", e);
            }
        }
    }

    /**
     * Read all bytes of a file.
     *
     * @param path file path
     * @return file content, or an empty array on I/O error (never null, so
     *         ByteString.copyFrom cannot NPE)
     */
    private static byte[] getFileBytes(String path) {
        try {
            return Files.readAllBytes(Paths.get(path));
        } catch (IOException e) {
            logger.error("get file bytes error", e);
            return new byte[0];
        }
    }

    /**
     * Read a page of lines from a file.
     *
     * @param path     file path
     * @param skipLine number of leading lines to skip
     * @param limit    maximum number of lines to return
     * @return the requested lines, or an empty list on I/O error (never null,
     *         so callers can iterate unconditionally)
     */
    private static List<String> readFile(String path, int skipLine, int limit) {
        try (Stream<String> stream = Files.lines(Paths.get(path))) {
            return stream.skip(skipLine).limit(limit).collect(Collectors.toList());
        } catch (IOException e) {
            logger.error("read file failed", e);
            return Collections.emptyList();
        }
    }

    /**
     * Read the whole file content, joined with CRLF.
     *
     * @param path file path
     * @return file content, or an empty string on I/O error (never null, so
     *         protobuf setMsg cannot NPE)
     */
    private static String readFile(String path) {
        StringBuilder sb = new StringBuilder();
        try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(path)))) {
            String line;
            while ((line = br.readLine()) != null) {
                sb.append(line).append("\r\n");
            }
            return sb.toString();
        } catch (IOException e) {
            logger.error("read file failed", e);
            return "";
        }
    }
}
// ---- (patch continues: diff header for .../server/utils/AlertManager.java followed here) ----
// ==== file (new in patch): dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java ====
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.server.utils;


import org.apache.dolphinscheduler.common.enums.AlertType;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.DaoFactory;
import org.apache.dolphinscheduler.dao.entity.Alert;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;

/**
 * Alert manager: builds alert content for process/task state changes and
 * persists the alerts via {@link AlertDao} (the alert server picks them up).
 */
public class AlertManager {

    private static final Logger logger = LoggerFactory.getLogger(AlertManager.class);

    private AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class);


    /**
     * Convert a command type to a human-readable name for alert titles.
     *
     * @param commandType command type of the process instance
     * @return readable name, "unknown type" for unrecognized values
     */
    private String getCommandCnName(CommandType commandType) {
        switch (commandType) {
            case RECOVER_TOLERANCE_FAULT_PROCESS:
                return "recover tolerance fault process";
            case RECOVER_SUSPENDED_PROCESS:
                return "recover suspended process";
            case START_CURRENT_TASK_PROCESS:
                return "start current task process";
            case START_FAILURE_TASK_PROCESS:
                return "start failure task process";
            case START_PROCESS:
                return "start process";
            case REPEAT_RUNNING:
                return "repeat running";
            case SCHEDULER:
                return "scheduler";
            case COMPLEMENT_DATA:
                return "complement data";
            case PAUSE:
                return "pause";
            case STOP:
                return "stop";
            default:
                return "unknown type";
        }
    }

    /**
     * process instance alert message format (success case)
     */
    private static final String PROCESS_INSTANCE_FORMAT =
            "\"Id:%d\"," +
                    "\"Name:%s\"," +
                    "\"Job type: %s\"," +
                    "\"State: %s\"," +
                    "\"Recovery:%s\"," +
                    "\"Run time: %d\"," +
                    "\"Start time: %s\"," +
                    "\"End time: %s\"," +
                    "\"Host: %s\"";

    /**
     * Build the alert content for a finished process instance.
     * Success: one formatted summary line. Failure: a JSON table of the
     * failed task instances.
     *
     * @param processInstance the finished process instance
     * @param taskInstances   its task instances (scanned on failure)
     * @return alert content string (empty when the state is neither success nor failure)
     */
    public String getContentProcessInstance(ProcessInstance processInstance,
                                            List<TaskInstance> taskInstances) {

        String res = "";
        if (processInstance.getState().typeIsSuccess()) {
            res = String.format(PROCESS_INSTANCE_FORMAT,
                    processInstance.getId(),
                    processInstance.getName(),
                    getCommandCnName(processInstance.getCommandType()),
                    processInstance.getState().toString(),
                    processInstance.getRecovery().toString(),
                    processInstance.getRunTimes(),
                    DateUtils.dateToString(processInstance.getStartTime()),
                    DateUtils.dateToString(processInstance.getEndTime()),
                    processInstance.getHost()
            );
            res = "[" + res + "]";
        } else if (processInstance.getState().typeIsFailure()) {

            List<LinkedHashMap<String, String>> failedTaskList = new ArrayList<>();

            for (TaskInstance task : taskInstances) {
                if (task.getState().typeIsSuccess()) {
                    continue;
                }
                LinkedHashMap<String, String> failedTaskMap = new LinkedHashMap<>();
                failedTaskMap.put("task id", String.valueOf(task.getId()));
                failedTaskMap.put("task name", task.getName());
                failedTaskMap.put("task type", task.getTaskType());
                failedTaskMap.put("task state", task.getState().toString());
                failedTaskMap.put("task start time", DateUtils.dateToString(task.getStartTime()));
                failedTaskMap.put("task end time", DateUtils.dateToString(task.getEndTime()));
                failedTaskMap.put("host", task.getHost());
                failedTaskMap.put("log path", task.getLogPath());
                failedTaskList.add(failedTaskMap);
            }
            res = JSONUtils.toJson(failedTaskList);
        }

        return res;
    }

    /**
     * Build worker fault-tolerance alert content as a JSON table.
     *
     * @param processInstance   affected process instance
     * @param toleranceTaskList tasks taken over from the failed worker
     * @return JSON string of one row per tolerated task
     */
    private String getWorkerToleranceContent(ProcessInstance processInstance, List<TaskInstance> toleranceTaskList) {

        List<LinkedHashMap<String, String>> toleranceTaskInstanceList = new ArrayList<>();

        for (TaskInstance taskInstance : toleranceTaskList) {
            LinkedHashMap<String, String> toleranceWorkerContentMap = new LinkedHashMap<>();
            toleranceWorkerContentMap.put("process name", processInstance.getName());
            toleranceWorkerContentMap.put("task name", taskInstance.getName());
            toleranceWorkerContentMap.put("host", taskInstance.getHost());
            toleranceWorkerContentMap.put("task retry times", String.valueOf(taskInstance.getRetryTimes()));
            toleranceTaskInstanceList.add(toleranceWorkerContentMap);
        }
        return JSONUtils.toJson(toleranceTaskInstanceList);
    }

    /**
     * Persist a worker fault-tolerance alert.
     *
     * @param processInstance   affected process instance
     * @param toleranceTaskList tasks taken over from the failed worker
     */
    public void sendAlertWorkerToleranceFault(ProcessInstance processInstance, List<TaskInstance> toleranceTaskList) {
        Alert alert = new Alert();
        alert.setTitle("worker fault tolerance");
        alert.setShowType(ShowType.TABLE);
        String content = getWorkerToleranceContent(processInstance, toleranceTaskList);
        alert.setContent(content);
        alert.setAlertType(AlertType.EMAIL);
        alert.setCreateTime(new Date());
        // fall back to the default alert group (id 1) when none is configured
        alert.setAlertGroupId(processInstance.getWarningGroupId() == null ?
                1 : processInstance.getWarningGroupId());
        alert.setReceivers(processInstance.getProcessDefinition().getReceivers());
        alert.setReceiversCc(processInstance.getProcessDefinition().getReceiversCc());

        alertDao.addAlert(alert);
        logger.info("add alert to db , alert : {}", alert.toString());

    }

    /**
     * Persist a process-instance state alert if the instance's warning type
     * asks for one (ALL / SUCCESS / FAILURE).
     *
     * @param processInstance finished process instance
     * @param taskInstances   its task instances (used for failure content)
     */
    public void sendAlertProcessInstance(ProcessInstance processInstance,
                                         List<TaskInstance> taskInstances) {

        boolean sendWarning = false;
        WarningType warningType = processInstance.getWarningType();
        switch (warningType) {
            case ALL:
                if (processInstance.getState().typeIsFinished()) {
                    sendWarning = true;
                }
                break;
            case SUCCESS:
                if (processInstance.getState().typeIsSuccess()) {
                    sendWarning = true;
                }
                break;
            case FAILURE:
                if (processInstance.getState().typeIsFailure()) {
                    sendWarning = true;
                }
                break;
            default:
        }
        if (!sendWarning) {
            return;
        }
        Alert alert = new Alert();


        String cmdName = getCommandCnName(processInstance.getCommandType());
        String success = processInstance.getState().typeIsSuccess() ? "success" : "failed";
        alert.setTitle(cmdName + success);
        ShowType showType = processInstance.getState().typeIsSuccess() ? ShowType.TEXT : ShowType.TABLE;
        alert.setShowType(showType);
        String content = getContentProcessInstance(processInstance, taskInstances);
        alert.setContent(content);
        alert.setAlertType(AlertType.EMAIL);
        // BUGFIX: guard against a null warning group id, consistent with
        // sendAlertWorkerToleranceFault (previously unboxed null -> NPE)
        alert.setAlertGroupId(processInstance.getWarningGroupId() == null ?
                1 : processInstance.getWarningGroupId());
        alert.setCreateTime(new Date());
        alert.setReceivers(processInstance.getProcessDefinition().getReceivers());
        alert.setReceiversCc(processInstance.getProcessDefinition().getReceiversCc());

        alertDao.addAlert(alert);
        logger.info("add alert to db , alert: {}", alert.toString());
    }

    /**
     * Persist a process timeout alert (delegates to the DAO).
     */
    public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProcessDefinition processDefinition) {
        alertDao.sendProcessTimeoutAlert(processInstance, processDefinition);
    }
}
// ---- (patch continues: diff header for .../server/utils/FlinkArgsUtils.java followed here) ----
// ==== file (new in patch): dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java ====
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.server.utils;


import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.task.flink.FlinkParameters;
import org.apache.commons.lang.StringUtils;

import java.util.ArrayList;
import java.util.List;


/**
 * flink args utils
 * (note: the class javadoc previously said "spark args utils" — copy-paste error)
 */
public class FlinkArgsUtils {

    /**
     * Build the flink CLI argument list from the task parameters.
     *
     * @param param flink task parameters
     * @return ordered argument list for the flink command line
     */
    public static List<String> buildArgs(FlinkParameters param) {
        List<String> args = new ArrayList<>();

        args.add(Constants.FLINK_RUN_MODE);      // -m
        args.add(Constants.FLINK_YARN_CLUSTER);  // yarn-cluster

        if (param.getSlot() != 0) {
            args.add(Constants.FLINK_YARN_SLOT); // -ys
            args.add(String.format("%d", param.getSlot()));
        }

        if (StringUtils.isNotEmpty(param.getAppName())) {
            args.add(Constants.FLINK_APP_NAME);  // -ynm
            args.add(param.getAppName());
        }

        if (param.getTaskManager() != 0) {
            args.add(Constants.FLINK_TASK_MANAGE); // -yn
            args.add(String.format("%d", param.getTaskManager()));
        }

        if (StringUtils.isNotEmpty(param.getJobManagerMemory())) {
            args.add(Constants.FLINK_JOB_MANAGE_MEM); // -yjm
            args.add(param.getJobManagerMemory());
        }

        if (StringUtils.isNotEmpty(param.getTaskManagerMemory())) {
            args.add(Constants.FLINK_TASK_MANAGE_MEM); // -ytm
            args.add(param.getTaskManagerMemory());
        }

        args.add(Constants.FLINK_detach); // -d

        // main class only applies to JVM programs, never to PYTHON
        if (param.getProgramType() != null
                && param.getProgramType() != ProgramType.PYTHON
                && StringUtils.isNotEmpty(param.getMainClass())) {
            args.add(Constants.FLINK_MAIN_CLASS); // -c
            args.add(param.getMainClass());
        }

        if (param.getMainJar() != null) {
            args.add(param.getMainJar().getRes());
        }


        // user supplied extra options, e.g. --files --conf --libjar ...
        // NOTE(review): the queue flag reuses Constants.SPARK_QUEUE ("--queue");
        // confirm this is the flag the flink CLI actually expects.
        if (StringUtils.isNotEmpty(param.getOthers())) {
            String others = param.getOthers();
            // only add the configured queue when the user did not set one explicitly
            if (!others.contains("--queue") && StringUtils.isNotEmpty(param.getQueue())) {
                args.add(Constants.SPARK_QUEUE);
                args.add(param.getQueue());
            }
            args.add(others);
        } else if (StringUtils.isNotEmpty(param.getQueue())) {
            args.add(Constants.SPARK_QUEUE);
            args.add(param.getQueue());
        }

        if (StringUtils.isNotEmpty(param.getMainArgs())) {
            args.add(param.getMainArgs());
        }

        return args;
    }

}
// ---- (patch continues: diff header for .../server/utils/LoggerUtils.java followed here) ----
+ */ +package org.apache.dolphinscheduler.server.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.slf4j.Logger; + +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * logger utils + */ +public class LoggerUtils { + + /** + * rules for extracting application ID + */ + private static final Pattern APPLICATION_REGEX = Pattern.compile(Constants.APPLICATION_REGEX); + + /** + * Task Logger's prefix + */ + public static final String TASK_LOGGER_INFO_PREFIX = "TASK"; + + public static final String TASK_LOGGER_THREAD_NAME = "TaskLogInfo"; + + /** + * build job id + * @param affix + * @param processDefId + * @param processInstId + * @param taskId + * @return + */ + public static String buildTaskId(String affix, + int processDefId, + int processInstId, + int taskId){ + // - [taskAppId=TASK_79_4084_15210] + return String.format(" - [taskAppId=%s-%s-%s-%s]",affix, + processDefId, + processInstId, + taskId); + } + + + /** + * processing log + * get yarn application id list + * @param log + * @param logger + * @return + */ + public static List getAppIds(String log, Logger logger) { + + List appIds = new ArrayList(); + + Matcher matcher = APPLICATION_REGEX.matcher(log); + + // analyse logs to get all submit yarn application id + while (matcher.find()) { + String appId = matcher.group(); + if(!appIds.contains(appId)){ + logger.info("find app id: {}", appId); + appIds.add(appId); + } + } + return appIds; + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java new file mode 100644 index 0000000000..a326977729 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under 
// ==== file (new in patch): dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java ====
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.server.utils;

import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * param utils
 */
public class ParamUtils {

    /**
     * Merge global and local parameters and resolve "$..." placeholders in the
     * local values against business-time variables plus the solidified global map.
     *
     * NOTE: this method mutates its inputs — local params are merged into the
     * passed-in {@code globalParams} map and Property values are updated in
     * place; callers rely on that, so the behavior is kept.
     *
     * @param globalParams    global parameters keyed by name (mutated in place; may be null)
     * @param globalParamsMap solidified global parameter name/value map (may be null)
     * @param localParams     task-local parameters keyed by name (may be null)
     * @param commandType     command type, used to derive business time
     * @param scheduleTime    schedule time, used to derive business time
     * @return the merged parameter map, or null when both inputs are null
     */
    public static Map<String, Property> convert(Map<String, Property> globalParams,
                                                Map<String, String> globalParamsMap,
                                                Map<String, Property> localParams,
                                                CommandType commandType,
                                                Date scheduleTime) {
        if (globalParams == null
                && localParams == null) {
            return null;
        }
        // if it is a complement,
        // you need to pass in the task instance id to locate the time
        // of the process instance complement
        Map<String, String> timeParams = BusinessTimeUtils
                .getBusinessTime(commandType,
                        scheduleTime);

        if (globalParamsMap != null) {
            timeParams.putAll(globalParamsMap);
        }

        // local parameters override/extend globals (in place, by design)
        if (globalParams != null && localParams != null) {
            globalParams.putAll(localParams);
        } else if (globalParams == null) {
            globalParams = localParams;
        }

        for (Map.Entry<String, Property> en : globalParams.entrySet()) {
            Property property = en.getValue();

            if (property.getValue() != null && property.getValue().length() > 0
                    && property.getValue().startsWith("$")) {
                /*
                 * local parameter refers to global parameter with the same name
                 * note: the global parameters of the process instance here are solidified parameters,
                 * and there are no variables in them.
                 */
                String val = property.getValue();
                val = ParameterUtils.convertParameterPlaceholders(val, timeParams);
                property.setValue(val);
            }
        }

        return globalParams;
    }

    /**
     * Flatten a name->Property map to a simple name->value map.
     *
     * @param paramsMap parameter map keyed by name
     * @return name/value map (same size as the input)
     */
    public static Map<String, String> convert(Map<String, Property> paramsMap) {
        Map<String, String> map = new HashMap<>();
        for (Map.Entry<String, Property> en : paramsMap.entrySet()) {
            map.put(en.getKey(), en.getValue().getValue());
        }
        return map;
    }
}
// ---- (patch continues: diff header for .../server/utils/ProcessUtils.java followed here) ----
// ==== file (new in patch): dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java ====
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.server.utils;

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.rpc.LogClient;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;


/**
 * Mainly used to get the start command line of a process and to kill
 * running tasks (local processes and their yarn applications).
 *
 * The quoting/escaping helpers below mirror the JDK's Windows
 * ProcessBuilder command-line construction rules; their statement order is
 * deliberately kept identical to the original.
 */
public class ProcessUtils {
    /**
     * logger
     */
    private final static Logger logger = LoggerFactory.getLogger(ProcessUtils.class);

    /**
     * Build a single command-line string from a command token list, applying
     * the same escaping/verification rules the JDK uses on Windows.
     *
     * @param commandList command tokens; element 0 is the executable
     * @return the assembled command line
     * @throws IOException if the executable path cannot be resolved
     */
    public static String buildCommandStr(List<String> commandList) throws IOException {
        String cmdstr;
        String[] cmd = commandList.toArray(new String[commandList.size()]);
        SecurityManager security = System.getSecurityManager();
        boolean allowAmbiguousCommands = false;
        if (security == null) {
            allowAmbiguousCommands = true;
            String value = System.getProperty("jdk.lang.Process.allowAmbiguousCommands");
            if (value != null) {
                allowAmbiguousCommands = !"false".equalsIgnoreCase(value);
            }
        }
        if (allowAmbiguousCommands) {
            // legacy mode: quote the executable only if it needs it
            String executablePath = new File(cmd[0]).getPath();

            if (needsEscaping(VERIFICATION_LEGACY, executablePath)) {
                executablePath = quoteString(executablePath);
            }

            cmdstr = createCommandLine(
                    VERIFICATION_LEGACY, executablePath, cmd);
        } else {
            String executablePath;
            try {
                executablePath = getExecutablePath(cmd[0]);
            } catch (IllegalArgumentException e) {
                // executable contained an embedded quote: re-tokenize the
                // whole command and try again
                StringBuilder join = new StringBuilder();
                for (String s : cmd) {
                    join.append(s).append(' ');
                }

                cmd = getTokensFromCommand(join.toString());
                executablePath = getExecutablePath(cmd[0]);

                // Check new executable name once more
                if (security != null) {
                    security.checkExec(executablePath);
                }
            }

            // batch files get the stricter CMD.EXE verification rules
            cmdstr = createCommandLine(
                    isShellFile(executablePath) ? VERIFICATION_CMD_BAT : VERIFICATION_WIN32, quoteString(executablePath), cmd);
        }
        return cmdstr;
    }

    /**
     * Resolve the executable path, rejecting embedded quotes.
     */
    private static String getExecutablePath(String path) throws IOException {
        boolean pathIsQuoted = isQuoted(true, path, "Executable name has embedded quote, split the arguments");

        File fileToRun = new File(pathIsQuoted ? path.substring(1, path.length() - 1) : path);
        return fileToRun.getPath();
    }

    /**
     * True when the executable is a Windows shell script (.CMD/.BAT).
     */
    private static boolean isShellFile(String executablePath) {
        String upPath = executablePath.toUpperCase();
        return (upPath.endsWith(".CMD") || upPath.endsWith(".BAT"));
    }

    /**
     * Wrap a string in double quotes.
     */
    private static String quoteString(String arg) {
        StringBuilder argbuf = new StringBuilder(arg.length() + 2);
        return argbuf.append('"').append(arg).append('"').toString();
    }


    /**
     * Split a command line into tokens (quoted runs kept intact).
     */
    private static String[] getTokensFromCommand(String command) {
        ArrayList<String> matchList = new ArrayList<>(8);
        Matcher regexMatcher = LazyPattern.PATTERN.matcher(command);
        while (regexMatcher.find()) {
            matchList.add(regexMatcher.group());
        }
        return matchList.toArray(new String[matchList.size()]);
    }

    /**
     * Holder for the lazily-initialized tokenizer pattern.
     */
    private static class LazyPattern {
        // Escape-support version:
        //    "(\")((?:\\\\\\1|.)+?)\\1|([^\\s\"]+)";
        private static final Pattern PATTERN = Pattern.compile("[^\\s\"]+|\"[^\"]*\"");
    }

    private static final int VERIFICATION_CMD_BAT = 0;

    private static final int VERIFICATION_WIN32 = 1;

    private static final int VERIFICATION_LEGACY = 2;

    // characters that force quoting, indexed by verification mode above
    private static final char[][] ESCAPE_VERIFICATION = {{' ', '\t', '<', '>', '&', '|', '^'},
            {' ', '\t', '<', '>'}, {' ', '\t'}};

    // (removed an unused "private static Matcher matcher;" field — dead state)

    /**
     * Join executable and arguments, quoting each argument that needs it.
     */
    private static String createCommandLine(int verificationType, final String executablePath, final String[] cmd) {
        StringBuilder cmdbuf = new StringBuilder(80);

        cmdbuf.append(executablePath);

        for (int i = 1; i < cmd.length; ++i) {
            cmdbuf.append(' ');
            String s = cmd[i];
            if (needsEscaping(verificationType, s)) {
                cmdbuf.append('"').append(s);
                // a trailing backslash would escape our closing quote
                if ((verificationType != VERIFICATION_CMD_BAT) && s.endsWith("\\")) {
                    cmdbuf.append('\\');
                }
                cmdbuf.append('"');
            } else {
                cmdbuf.append(s);
            }
        }
        return cmdbuf.toString();
    }

    /**
     * True when the argument is already wrapped in quotes; optionally reject
     * interior quotes.
     */
    private static boolean isQuoted(boolean noQuotesInside, String arg, String errorMessage) {
        int lastPos = arg.length() - 1;
        if (lastPos >= 1 && arg.charAt(0) == '"' && arg.charAt(lastPos) == '"') {
            // The argument has already been quoted.
            if (noQuotesInside) {
                if (arg.indexOf('"', 1) != lastPos) {
                    // There is ["] inside.
                    throw new IllegalArgumentException(errorMessage);
                }
            }
            return true;
        }
        if (noQuotesInside) {
            if (arg.indexOf('"') >= 0) {
                // There is ["] inside.
                throw new IllegalArgumentException(errorMessage);
            }
        }
        return false;
    }

    /**
     * True when the argument must be quoted for the given verification mode.
     */
    private static boolean needsEscaping(int verificationType, String arg) {

        boolean argIsQuoted = isQuoted((verificationType == VERIFICATION_CMD_BAT), arg, "Argument has embedded quote, use the explicit CMD.EXE call.");

        if (!argIsQuoted) {
            char[] testEscape = ESCAPE_VERIFICATION[verificationType];
            for (int i = 0; i < testEscape.length; ++i) {
                if (arg.indexOf(testEscape[i]) >= 0) {
                    return true;
                }
            }
        }
        return false;
    }


    /**
     * Kill the most recent yarn application by writing a kill script to the
     * task work dir and executing it (via sudo as the tenant when set).
     *
     * SECURITY NOTE: the script is built by string concatenation from appid /
     * tenantCode / workDir; these come from the scheduler's own database, not
     * end users, but keep that invariant in mind.
     *
     * @param appIds     yarn application ids; only the last one is killed
     * @param logger     task logger
     * @param tenantCode tenant to run the kill command as (may be empty)
     * @param workDir    task execution directory where the script is written
     * @throws IOException on script write failure
     */
    public static void cancelApplication(List<String> appIds, Logger logger, String tenantCode, String workDir)
            throws IOException {
        if (appIds.size() > 0) {
            String appid = appIds.get(appIds.size() - 1);
            String commandFile = String
                    .format("%s/%s.kill", workDir, appid);
            String cmd = "yarn application -kill " + appid;
            try {
                StringBuilder sb = new StringBuilder();
                sb.append("#!/bin/sh\n");
                sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n");
                sb.append("cd $BASEDIR\n");
                if (CommonUtils.getSystemEnvPath() != null) {
                    sb.append("source " + CommonUtils.getSystemEnvPath() + "\n");
                }
                sb.append("\n\n");
                sb.append(cmd);

                File f = new File(commandFile);

                if (!f.exists()) {
                    FileUtils.writeStringToFile(new File(commandFile), sb.toString(), Charset.forName("UTF-8"));
                }

                String runCmd = "sh " + commandFile;
                if (StringUtils.isNotEmpty(tenantCode)) {
                    runCmd = "sudo -u " + tenantCode + " " + runCmd;
                }

                logger.info("kill cmd:{}", runCmd);

                Runtime.getRuntime().exec(runCmd);
            } catch (Exception e) {
                logger.error("kill application failed : " + e.getMessage(), e);
            }
        }
    }

    /**
     * Kill a task instance: SIGKILL its process tree, then kill any yarn jobs
     * found in its log.
     *
     * @param taskInstance the task to kill (pid 0 means nothing to kill)
     */
    public static void kill(TaskInstance taskInstance) {
        try {
            int processId = taskInstance.getPid();
            if (processId == 0) {
                logger.error("process kill failed, process id :{}, task id:{}",
                        processId, taskInstance.getId());
                return;
            }

            String cmd = String.format("sudo kill -9 %s", getPidsStr(processId));

            logger.info("process id:{}, cmd:{}", processId, cmd);

            OSUtils.exeCmd(cmd);

            // find log and kill yarn job
            killYarnJob(taskInstance);

        } catch (Exception e) {
            logger.error("kill failed : " + e.getMessage(), e);
        }
    }

    /**
     * compiled once instead of on every getPidsStr call
     */
    private static final Pattern PID_PATTERN = Pattern.compile("(\\d+)");

    /**
     * Get the process id plus all descendant pids as a space-separated string,
     * parsed from `pstree -p`.
     *
     * @param processId root process id
     * @return space-separated pid list
     * @throws Exception if pstree execution fails
     */
    private static String getPidsStr(int processId) throws Exception {
        StringBuilder sb = new StringBuilder();
        // pstree -p pid : prints the tree with pids in parentheses
        String pids = OSUtils.exeCmd("pstree -p " + processId + "");
        Matcher mat = PID_PATTERN.matcher(pids);
        while (mat.find()) {
            sb.append(mat.group() + " ");
        }
        return sb.toString().trim();
    }

    /**
     * Fetch the task log from the worker's logger server and kill every yarn
     * application id found in it.
     *
     * @param taskInstance the task whose yarn jobs should be killed
     */
    public static void killYarnJob(TaskInstance taskInstance) {
        try {
            // give the worker a moment to flush the log before reading it
            Thread.sleep(Constants.SLEEP_TIME_MILLIS);
            LogClient logClient = new LogClient(taskInstance.getHost(), Constants.RPC_PORT);

            String log = logClient.viewLog(taskInstance.getLogPath());
            if (StringUtils.isNotEmpty(log)) {
                List<String> appIds = LoggerUtils.getAppIds(log, logger);
                String workerDir = taskInstance.getExecutePath();
                if (StringUtils.isEmpty(workerDir)) {
                    logger.error("task instance work dir is empty");
                    throw new RuntimeException("task instance work dir is empty");
                }
                if (appIds.size() > 0) {
                    cancelApplication(appIds, logger, taskInstance.getProcessInstance().getTenantCode(), taskInstance.getExecutePath());
                }
            }

        } catch (Exception e) {
            logger.error("kill yarn job failure", e);
        }
    }
}
// ---- (patch continues: diff header for .../server/utils/SparkArgsUtils.java followed here) ----
package org.apache.dolphinscheduler.server.utils;


import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.task.spark.SparkParameters;
import org.apache.commons.lang.StringUtils;

import java.util.ArrayList;
import java.util.List;


/**
 * Utility for building the spark-submit command-line argument list from
 * a task's {@link SparkParameters}.
 */
public class SparkArgsUtils {

    /**
     * Build spark-submit arguments.
     *
     * @param param spark task parameters from the task definition
     * @return ordered argument list for spark-submit
     */
    public static List<String> buildArgs(SparkParameters param) {
        List<String> args = new ArrayList<>();

        // deploy mode defaults to "cluster" when the user did not set one
        String deployMode = "cluster";
        if (StringUtils.isNotEmpty(param.getDeployMode())) {
            deployMode = param.getDeployMode();
        }

        args.add(Constants.MASTER);
        // anything but local runs on yarn and needs an explicit --deploy-mode
        if (!"local".equals(deployMode)) {
            args.add("yarn");
            args.add(Constants.DEPLOY_MODE);
        }
        // BUG FIX: the original appended param.getDeployMode() here, which is
        // null/empty when unset even though deployMode defaulted to "cluster",
        // yielding a broken "--deploy-mode <null>"; append the resolved value.
        args.add(deployMode);

        // --class is only meaningful for JVM programs, not Python scripts
        if (param.getProgramType() != null
                && param.getProgramType() != ProgramType.PYTHON
                && StringUtils.isNotEmpty(param.getMainClass())) {
            args.add(Constants.CLASS);
            args.add(param.getMainClass());
        }

        // 0 means "not configured" for the numeric resource settings below
        if (param.getDriverCores() != 0) {
            args.add(Constants.DRIVER_CORES);
            args.add(String.format("%d", param.getDriverCores()));
        }

        if (StringUtils.isNotEmpty(param.getDriverMemory())) {
            args.add(Constants.DRIVER_MEMORY);
            args.add(param.getDriverMemory());
        }

        if (param.getNumExecutors() != 0) {
            args.add(Constants.NUM_EXECUTORS);
            args.add(String.format("%d", param.getNumExecutors()));
        }

        if (param.getExecutorCores() != 0) {
            args.add(Constants.EXECUTOR_CORES);
            args.add(String.format("%d", param.getExecutorCores()));
        }

        if (StringUtils.isNotEmpty(param.getExecutorMemory())) {
            args.add(Constants.EXECUTOR_MEMORY);
            args.add(param.getExecutorMemory());
        }

        // --files --conf --libjar ...
        String others = param.getOthers();
        if (StringUtils.isNotEmpty(others)) {
            // only inject the queue when the user has not already passed --queue
            if (!others.contains("--queue") && StringUtils.isNotEmpty(param.getQueue())) {
                args.add(Constants.SPARK_QUEUE);
                args.add(param.getQueue());
            }
            args.add(others);
        } else if (StringUtils.isNotEmpty(param.getQueue())) {
            args.add(Constants.SPARK_QUEUE);
            args.add(param.getQueue());
        }

        if (param.getMainJar() != null) {
            args.add(param.getMainJar().getRes());
        }

        if (StringUtils.isNotEmpty(param.getMainArgs())) {
            args.add(param.getMainArgs());
        }

        return args;
    }

}
package org.apache.dolphinscheduler.server.utils;

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.apache.dolphinscheduler.common.utils.CollectionUtils.isNotEmpty;

/**
 * Helpers for generating the Hive SQL statements that register user
 * defined functions: "add jar ..." followed by
 * "create temporary function ...".
 */
public class UDFUtils {

    /**
     * MessageFormat template; the doubled quotes render as literal
     * single quotes around the class name.
     */
    private static final String CREATE_FUNCTION_FORMAT = "create temporary function {0} as ''{1}''";


    /**
     * Build the full statement list for the given UDFs under a tenant's
     * HDFS udf directory.
     *
     * @throws RuntimeException when the HDFS udf base path is not configured
     */
    public static List<String> createFuncs(List<UdfFunc> udfFuncs, String tenantCode, Logger logger) {
        // resolve the tenant's udf jar directory on HDFS
        String hiveUdfJarPath = HadoopUtils.getHdfsUdfDir(tenantCode);
        logger.info("hive udf jar path : {}", hiveUdfJarPath);

        // a missing base path means the deployment is misconfigured
        if (StringUtils.isEmpty(hiveUdfJarPath)) {
            logger.error("not define hive udf jar path");
            throw new RuntimeException("hive udf jar base path not defined ");
        }

        List<String> funcSqls = new ArrayList<>();
        buildJarSql(funcSqls, getFuncResources(udfFuncs), hiveUdfJarPath);
        buildTempFuncSql(funcSqls, udfFuncs);
        return funcSqls;
    }

    /** Append one "add jar" statement per distinct resource. */
    private static void buildJarSql(List<String> sqls, Set<String> resources, String uploadPath) {
        String defaultFS = HadoopUtils.getInstance().getConfiguration().get(Constants.FS_DEFAULTFS);
        // qualify a bare path with the cluster's default filesystem
        String basePath = uploadPath.startsWith("hdfs:") ? uploadPath : defaultFS + uploadPath;

        for (String resource : resources) {
            sqls.add(String.format("add jar %s/%s", basePath, resource));
        }
    }

    /** Append one "create temporary function" statement per UDF. */
    private static void buildTempFuncSql(List<String> sqls, List<UdfFunc> udfFuncs) {
        if (!isNotEmpty(udfFuncs)) {
            return;
        }
        for (UdfFunc func : udfFuncs) {
            sqls.add(MessageFormat.format(CREATE_FUNCTION_FORMAT, func.getFuncName(), func.getClassName()));
        }
    }

    /** Collect the distinct resource (jar) names referenced by the UDFs. */
    private static Set<String> getFuncResources(List<UdfFunc> udfFuncs) {
        Set<String> resources = new HashSet<>();
        for (UdfFunc func : udfFuncs) {
            resources.add(func.getResourceName());
        }
        return resources;
    }

}
package org.apache.dolphinscheduler.server.worker;

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.queue.ITaskQueue;
import org.apache.dolphinscheduler.common.queue.TaskQueueFactory;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.master.AbstractServer;
import org.apache.dolphinscheduler.server.utils.ProcessUtils;
import org.apache.dolphinscheduler.server.worker.runner.FetchTaskThread;
import org.apache.dolphinscheduler.server.zk.ZKWorkerClient;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.context.annotation.ComponentScan;

import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Worker server: registers itself in zookeeper, sends periodic heartbeats,
 * fetches tasks from the task queue for execution and processes kill requests.
 */
// BUG FIX: this commit renames the code base from cn.escheduler to
// org.apache.dolphinscheduler; scanning the old package would find no beans.
@ComponentScan("org.apache.dolphinscheduler")
public class WorkerServer extends AbstractServer {

    private static final Logger logger = LoggerFactory.getLogger(WorkerServer.class);


    /**
     * zk worker client
     */
    private static ZKWorkerClient zkWorkerClient = null;

    /**
     * process database access
     */
    @Autowired
    private ProcessDao processDao;

    /**
     * alert database access
     */
    @Autowired
    private AlertDao alertDao;

    /**
     * heartbeat thread pool (typo "Worer" fixed; field is private, no external impact)
     */
    private ScheduledExecutorService heartbeatWorkerService;

    /**
     * task queue impl
     */
    protected ITaskQueue taskQueue;

    /**
     * kill executor service
     */
    private ExecutorService killExecutorService;

    /**
     * fetch task executor service
     */
    private ExecutorService fetchTaskExecutorService;

    public WorkerServer(){}

    /**
     * Build a fully wired worker: load worker.properties, connect the zk
     * client, resolve the task queue implementation and create the single
     * threaded kill / fetch executors. Exits the JVM when configuration
     * cannot be loaded, since the server cannot run without it.
     */
    public WorkerServer(ProcessDao processDao, AlertDao alertDao){
        try {
            conf = new PropertiesConfiguration(Constants.WORKER_PROPERTIES_PATH);
        }catch (ConfigurationException e){
            logger.error("load configuration failed",e);
            System.exit(1);
        }

        zkWorkerClient = ZKWorkerClient.getZKWorkerClient();

        this.taskQueue = TaskQueueFactory.getTaskQueueInstance();

        this.killExecutorService = ThreadUtils.newDaemonSingleThreadExecutor("Worker-Kill-Thread-Executor");

        this.fetchTaskExecutorService = ThreadUtils.newDaemonSingleThreadExecutor("Worker-Fetch-Thread-Executor");
    }


    /**
     * worker server startup (boots Spring, which then invokes {@link #run(String...)})
     *
     * worker server not use web service
     */
    public static void main(String[] args) {

        SpringApplication app = new SpringApplication(WorkerServer.class);

        app.run(args);
    }


    @Override
    public void run(String... args) throws Exception {
        // set the name of the current thread
        Thread.currentThread().setName("Worker-Main-Thread");

        // build a fully initialized server around the Spring-injected DAOs
        WorkerServer workerServer = new WorkerServer(processDao,alertDao);

        workerServer.run(processDao,alertDao);

        logger.info("worker server started");

        // blocking until stop() is called
        workerServer.awaitTermination();
    }


    /**
     * Start the background machinery: heartbeat scheduling, the kill-request
     * watcher, a JVM shutdown-hook alert, and the task fetching thread.
     */
    public void run(ProcessDao processDao, AlertDao alertDao){

        // heartbeat interval
        heartBeatInterval = conf.getInt(Constants.WORKER_HEARTBEAT_INTERVAL,
                Constants.defaultWorkerHeartbeatInterval);

        heartbeatWorkerService = ThreadUtils.newDaemonThreadScheduledExecutor("Worker-Heartbeat-Thread-Executor", Constants.defaulWorkerHeartbeatThreadNum);

        // heartbeat thread implement
        Runnable heartBeatThread = heartBeatThread();

        zkWorkerClient.setStoppable(this);

        // regular heartbeat
        // delay 5 seconds, send heartbeat every heartBeatInterval seconds
        heartbeatWorkerService.
                scheduleAtFixedRate(heartBeatThread, 5, heartBeatInterval, TimeUnit.SECONDS);

        // kill process thread implement
        Runnable killProcessThread = getKillProcessThread();

        // submit kill process thread
        killExecutorService.execute(killProcessThread);

        /**
         * register hooks, which are called before the process exits
         */
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
            @Override
            public void run() {

                logger.warn("worker server stopped");
                // worker server exit alert
                if (zkWorkerClient.getActiveMasterNum() <= 1) {
                    for (int i = 0; i < Constants.ESCHEDULER_WARN_TIMES_FAILOVER;i++) {
                        alertDao.sendServerStopedAlert(1, OSUtils.getHost(), "Worker-Server");
                    }
                }

            }
        }));

        // get worker number of concurrent tasks
        int taskNum = conf.getInt(Constants.WORKER_FETCH_TASK_NUM,Constants.defaultWorkerFetchTaskNum);

        // new fetch task thread
        FetchTaskThread fetchTaskThread = new FetchTaskThread(taskNum,zkWorkerClient, processDao,conf, taskQueue);

        // submit fetch task thread
        fetchTaskExecutorService.execute(fetchTaskThread);
    }

    @Override
    public synchronized void stop(String cause) {

        try {
            //execute only once
            if(Stopper.isStoped()){
                return;
            }

            logger.info("worker server is stopping ..., cause : {}", cause);

            // set stop signal is true
            Stopper.stop();

            try {
                //thread sleep 3 seconds for thread quietly stop
                Thread.sleep(3000L);
            }catch (Exception e){
                logger.warn("thread sleep exception:" + e.getMessage(), e);
            }

            try {
                heartbeatWorkerService.shutdownNow();
            }catch (Exception e){
                logger.warn("heartbeat service stopped exception");
            }
            logger.info("heartbeat service stopped");

            try {
                ThreadPoolExecutors.getInstance().shutdown();
            }catch (Exception e){
                logger.warn("threadpool service stopped exception:{}",e.getMessage());
            }

            logger.info("threadpool service stopped");

            try {
                killExecutorService.shutdownNow();
            }catch (Exception e){
                logger.warn("worker kill executor service stopped exception:{}",e.getMessage());
            }
            logger.info("worker kill executor service stopped");

            try {
                fetchTaskExecutorService.shutdownNow();
            }catch (Exception e){
                logger.warn("worker fetch task service stopped exception:{}",e.getMessage());
            }
            logger.info("worker fetch task service stopped");

            try{
                zkWorkerClient.close();
            }catch (Exception e){
                logger.warn("zookeeper service stopped exception:{}",e.getMessage());
            }
            logger.info("zookeeper service stopped");

            //notify awaitTermination() in the main thread
            synchronized (lock) {
                terminated = true;
                lock.notifyAll();
            }
        } catch (Exception e) {
            logger.error("worker server stop exception : " + e.getMessage(), e);
            System.exit(-1);
        }
    }


    /**
     * heartbeat thread implement
     * @return runnable that refreshes this worker's znode in zookeeper
     */
    private Runnable heartBeatThread(){
        Runnable heartBeatThread = new Runnable() {
            @Override
            public void run() {
                // send heartbeat to zk
                if (StringUtils.isEmpty(zkWorkerClient.getWorkerZNode())){
                    logger.error("worker send heartbeat to zk failed");
                }

                zkWorkerClient.heartBeatForZk(zkWorkerClient.getWorkerZNode() , Constants.WORKER_PREFIX);
            }
        };
        return heartBeatThread;
    }


    /**
     * kill process thread implement: polls the kill set in the task queue and
     * kills tasks addressed to this host (DEPENDENT tasks are just marked
     * KILL in the database since they have no OS process).
     * @return runnable for the kill watcher loop
     */
    private Runnable getKillProcessThread(){
        Runnable killProcessThread = new Runnable() {
            @Override
            public void run() {
                Set<String> taskInfoSet = taskQueue.smembers(Constants.SCHEDULER_TASKS_KILL);
                while (Stopper.isRunning()){
                    try {
                        Thread.sleep(Constants.SLEEP_TIME_MILLIS);
                    } catch (InterruptedException e) {
                        logger.error("interrupted exception",e);
                    }
                    // if set is null , return
                    if (CollectionUtils.isNotEmpty(taskInfoSet)){
                        for (String taskInfo : taskInfoSet){
                            // kill entries are "<host>-<taskInstanceId>"; only handle our host
                            if (taskInfo.startsWith(OSUtils.getHost())){
                                String[] taskInfoArr = taskInfo.split("-");
                                if (taskInfoArr.length != 2){
                                    continue;
                                }else {
                                    int taskInstId=Integer.parseInt(taskInfoArr[1]);
                                    TaskInstance taskInstance = processDao.getTaskInstanceRelationByTaskId(taskInstId);

                                    if(taskInstance.getTaskType().equals(TaskType.DEPENDENT.toString())){
                                        taskInstance.setState(ExecutionStatus.KILL);
                                        processDao.saveTaskInstance(taskInstance);
                                    }else{
                                        ProcessUtils.kill(taskInstance);
                                    }
                                    taskQueue.srem(Constants.SCHEDULER_TASKS_KILL,taskInfo);
                                }
                            }
                        }
                    }

                    taskInfoSet = taskQueue.smembers(Constants.SCHEDULER_TASKS_KILL);
                }
            }
        };
        return killProcessThread;
    }

}
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.log; + +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.sift.AbstractDiscriminator; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.server.utils.LoggerUtils; + +public class TaskLogDiscriminator extends AbstractDiscriminator { + + private String key; + + private String logBase; + + /** + * logger name should be like: + * Task Logger name should be like: Task-{processDefinitionId}-{processInstanceId}-{taskInstanceId} + */ + @Override + public String getDiscriminatingValue(ILoggingEvent event) { + String loggerName = event.getLoggerName() + .split(Constants.EQUAL_SIGN)[1]; + String prefix = LoggerUtils.TASK_LOGGER_INFO_PREFIX + "-"; + if (loggerName.startsWith(prefix)) { + return loggerName.substring(prefix.length(), + loggerName.length() - 1).replace("-","/"); + } else { + return "unknown_task"; + } + } + + @Override + public void start() { + started = true; + } + + @Override + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getLogBase() { + return logBase; + } + + public void setLogBase(String logBase) { + this.logBase = logBase; + } +} diff --git 
a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/log/TaskLogFilter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/log/TaskLogFilter.java new file mode 100644 index 0000000000..c704a88520 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/log/TaskLogFilter.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.log; + +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.filter.Filter; +import ch.qos.logback.core.spi.FilterReply; +import org.apache.dolphinscheduler.server.utils.LoggerUtils; + +/** + * task log filter + */ +public class TaskLogFilter extends Filter { + + @Override + public FilterReply decide(ILoggingEvent event) { + if (event.getThreadName().startsWith(LoggerUtils.TASK_LOGGER_THREAD_NAME)) { + return FilterReply.ACCEPT; + } + return FilterReply.DENY; + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/log/WorkerLogFilter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/log/WorkerLogFilter.java new file mode 100644 index 0000000000..e666b3ac2f --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/log/WorkerLogFilter.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.log; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.filter.Filter; +import ch.qos.logback.core.spi.FilterReply; + +/** + * worker log filter + */ +public class WorkerLogFilter extends Filter { + Level level; + + @Override + public FilterReply decide(ILoggingEvent event) { + if (event.getThreadName().startsWith("Worker-")){ + return FilterReply.ACCEPT; + } + return FilterReply.DENY; + } + public void setLevel(String level) { + this.level = Level.toLevel(level); + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java new file mode 100644 index 0000000000..4dfc6fb513 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java @@ -0,0 +1,324 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.dolphinscheduler.server.worker.runner;

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.queue.ITaskQueue;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.zk.AbstractZKClient;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.server.zk.ZKWorkerClient;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang3.StringUtils;
import org.apache.curator.framework.recipes.locks.InterProcessMutex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;

/**
 * Fetch task thread: under a zookeeper lock, polls task-instance ids from the
 * shared task queue and submits the matching tasks to the worker's executor.
 */
public class FetchTaskThread implements Runnable{

    private static final Logger logger = LoggerFactory.getLogger(FetchTaskThread.class);
    /**
     * set worker concurrent tasks (max number polled per round)
     */
    private final int taskNum;

    /**
     * zkWorkerClient
     */
    private final ZKWorkerClient zkWorkerClient;

    /**
     * task queue impl
     */
    protected ITaskQueue taskQueue;

    /**
     * process database access
     */
    private final ProcessDao processDao;

    /**
     * worker thread pool executor
     */
    private final ExecutorService workerExecService;

    /**
     * worker exec nums (thread pool size, from configuration)
     */
    private int workerExecNums;

    /**
     * conf
     */
    private Configuration conf;

    /**
     * task instance currently being prepared (shared by the helper methods)
     */
    private TaskInstance taskInstance;

    /**
     * task instance id
     */
    Integer taskInstId;

    public FetchTaskThread(int taskNum, ZKWorkerClient zkWorkerClient,
                           ProcessDao processDao, Configuration conf,
                           ITaskQueue taskQueue){
        this.taskNum = taskNum;
        this.zkWorkerClient = zkWorkerClient;
        this.processDao = processDao;
        this.workerExecNums = conf.getInt(Constants.WORKER_EXEC_THREADS,
                Constants.defaultWorkerExecThreadNum);
        // worker thread pool executor
        this.workerExecService = ThreadUtils.newDaemonFixedThreadExecutor("Worker-Fetch-Task-Thread",workerExecNums);
        this.conf = conf;
        this.taskQueue = taskQueue;
    }

    /**
     * Check if the task runs on this worker.
     * A task with no worker group, an unknown worker group, or a worker group
     * without an ip list runs on any worker.
     *
     * @param taskInstance task to check
     * @param host this worker's host/ip
     * @return true when this worker may run the task
     */
    private boolean checkWorkerGroup(TaskInstance taskInstance, String host){

        int taskWorkerGroupId = processDao.getTaskWorkerGroupId(taskInstance);

        if(taskWorkerGroupId <= 0){
            return true;
        }
        WorkerGroup workerGroup = processDao.queryWorkerGroupById(taskWorkerGroupId);
        if(workerGroup == null ){
            logger.info("task {} cannot find the worker group, use all worker instead.", taskInstance.getId());
            return true;
        }
        String ips = workerGroup.getIpList();
        if(StringUtils.isBlank(ips)){
            logger.error("task:{} worker group:{} parameters(ip_list) is null, this task would be running on all workers",
                    taskInstance.getId(), workerGroup.getId());
            // BUG FIX: the original fell through to ips.split(...) here, which for a
            // blank list never matches any host (and NPEs for a null list), so the
            // task silently never ran despite the log message above. Return true so
            // the task can actually run on any worker, as the message states.
            return true;
        }
        String[] ipArray = ips.split(Constants.COMMA);
        List<String> ipList = Arrays.asList(ipArray);
        return ipList.contains(host);
    }




    @Override
    public void run() {
        while (Stopper.isRunning()){
            InterProcessMutex mutex = null;
            try {
                ThreadPoolExecutor poolExecutor = (ThreadPoolExecutor) workerExecService;
                //check memory and cpu usage and threads
                boolean runCheckFlag = OSUtils.checkResource(this.conf, false) && checkThreadCount(poolExecutor);

                Thread.sleep(Constants.SLEEP_TIME_MILLIS);

                if(!runCheckFlag) {
                    continue;
                }

                //whether have tasks, if no tasks , no need lock  //get all tasks
                List<String> tasksQueueList = taskQueue.getAllTasks(Constants.SCHEDULER_TASKS_QUEUE);
                if (CollectionUtils.isEmpty(tasksQueueList)){
                    continue;
                }
                // creating distributed locks, lock path /escheduler/lock/worker
                mutex = zkWorkerClient.acquireZkLock(zkWorkerClient.getZkClient(),
                        zkWorkerClient.getWorkerLockPath());


                // task instance id str
                List<String> taskQueueStrArr = taskQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, taskNum);

                for(String taskQueueStr : taskQueueStrArr){
                    if (StringUtils.isEmpty(taskQueueStr)) {
                        continue;
                    }

                    if (!checkThreadCount(poolExecutor)) {
                        break;
                    }

                    // get task instance id

                    taskInstId = getTaskInstanceId(taskQueueStr);

                    // get task instance relation
                    taskInstance = processDao.getTaskInstanceRelationByTaskId(taskInstId);

                    Tenant tenant = processDao.getTenantForProcess(taskInstance.getProcessInstance().getTenantId(),
                            taskInstance.getProcessDefine().getUserId());

                    // verify tenant is null
                    if (verifyTenantIsNull(taskQueueStr, tenant)) {
                        continue;
                    }

                    // set queue for process instance, user-specified queue takes precedence over tenant queue
                    String userQueue = processDao.queryUserQueueByProcessInstanceId(taskInstance.getProcessInstanceId());
                    taskInstance.getProcessInstance().setQueue(StringUtils.isEmpty(userQueue) ? tenant.getQueue() : userQueue);

                    logger.info("worker fetch taskId : {} from queue ", taskInstId);

                    // mainly to wait for the master insert task to succeed
                    waitForMasterEnterQueue();

                    // verify task instance is null
                    if (verifyTaskInstanceIsNull(taskQueueStr)) {
                        continue;
                    }

                    if(!checkWorkerGroup(taskInstance, OSUtils.getHost())){
                        continue;
                    }

                    // local execute path
                    String execLocalPath = getExecLocalPath();

                    logger.info("task instance  local execute path : {} ", execLocalPath);

                    // init task
                    taskInstance.init(OSUtils.getHost(),
                            new Date(),
                            execLocalPath);

                    // check and create Linux users
                    FileUtils.createWorkDirAndUserIfAbsent(execLocalPath,
                            tenant.getTenantCode(), logger);

                    logger.info("task : {} ready to submit to task scheduler thread",taskInstId);
                    // submit task
                    workerExecService.submit(new TaskScheduleThread(taskInstance, processDao));

                    // remove node from zk
                    taskQueue.removeNode(Constants.SCHEDULER_TASKS_QUEUE, taskQueueStr);
                }

            }catch (Exception e){
                logger.error("fetch task thread failure" ,e);
            }finally {
                AbstractZKClient.releaseMutex(mutex);
            }
        }
    }

    /**
     * verify task instance is null; when it is, the queue node is dropped
     * so the entry is not retried forever
     * @param taskQueueStr queue entry being processed
     * @return true when the task instance could not be loaded
     */
    private boolean verifyTaskInstanceIsNull(String taskQueueStr) {
        if (taskInstance == null ) {
            logger.error("task instance is null. task id : {} ", taskInstId);
            taskQueue.removeNode(Constants.SCHEDULER_TASKS_QUEUE, taskQueueStr);
            return true;
        }
        return false;
    }

    /**
     * verify tenant is null; when it is, the queue node is dropped
     * @param taskQueueStr queue entry being processed
     * @param tenant tenant resolved for the task's process instance
     * @return true when no tenant exists for the task
     */
    private boolean verifyTenantIsNull(String taskQueueStr, Tenant tenant) {
        if(tenant == null){
            logger.error("tenant not exists,process define id : {},process instance id : {},task instance id : {}",
                    taskInstance.getProcessDefine().getId(),
                    taskInstance.getProcessInstance().getId(),
                    taskInstance.getId());
            taskQueue.removeNode(Constants.SCHEDULER_TASKS_QUEUE, taskQueueStr);
            return true;
        }
        return false;
    }

    /**
     * get execute local path for the current task instance
     * @return local working directory path
     */
    private String getExecLocalPath(){
        return FileUtils.getProcessExecDir(taskInstance.getProcessDefine().getProjectId(),
                taskInstance.getProcessDefine().getId(),
                taskInstance.getProcessInstance().getId(),
                taskInstance.getId());
    }

    /**
     * check whether the worker pool still has a free thread
     * @param poolExecutor the worker thread pool
     * @return true when a thread is available
     */
    private boolean checkThreadCount(ThreadPoolExecutor poolExecutor) {
        int activeCount = poolExecutor.getActiveCount();
        if (activeCount >= workerExecNums) {
            logger.info("thread insufficient , activeCount : {} , " +
                            "workerExecNums : {}, will sleep : {} millis for thread resource",
                    activeCount,
                    workerExecNums,
                    Constants.SLEEP_TIME_MILLIS);
            return false;
        }
        return true;
    }

    /**
     * mainly to wait for the master insert task to succeed:
     * re-query the task instance up to 30 times with a sleep between tries
     * @throws Exception when interrupted while sleeping
     */
    private void waitForMasterEnterQueue()throws Exception{
        int retryTimes = 30;

        while (taskInstance == null && retryTimes > 0) {
            Thread.sleep(Constants.SLEEP_TIME_MILLIS);
            taskInstance = processDao.findTaskInstanceById(taskInstId);
            retryTimes--;
        }
    }

    /**
     * get task instance id from a queue entry
     * (underscore-separated; the id is the fourth field)
     *
     * @param taskQueueStr queue entry
     * @return task instance id
     */
    private int getTaskInstanceId(String taskQueueStr){
        return Integer.parseInt(taskQueueStr.split(Constants.UNDERLINE)[3]);
    }
}
diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java new file mode 100644 index 0000000000..91da0b6d1c --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.runner; + + +import ch.qos.logback.classic.LoggerContext; +import ch.qos.logback.classic.sift.SiftingAppender; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.server.utils.LoggerUtils; +import org.apache.dolphinscheduler.server.worker.log.TaskLogDiscriminator; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.TaskManager; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import com.alibaba.fastjson.JSONObject; +import org.apache.dolphinscheduler.common.utils.CommonUtils; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; +import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.util.*; +import java.util.stream.Collectors; + + +/** + * task scheduler thread + */ +public class TaskScheduleThread implements Runnable { + + /** + * logger + */ + private final Logger logger = LoggerFactory.getLogger(TaskScheduleThread.class); + + /** + * task instance + */ + private TaskInstance taskInstance; + + /** + * process database access + */ + private final ProcessDao processDao; + 
+ /** + * abstract task + */ + private AbstractTask task; + + public TaskScheduleThread(TaskInstance taskInstance, ProcessDao processDao){ + this.processDao = processDao; + this.taskInstance = taskInstance; + } + + @Override + public void run() { + + // update task state is running according to task type + updateTaskState(taskInstance.getTaskType()); + + try { + logger.info("script path : {}", taskInstance.getExecutePath()); + // task node + TaskNode taskNode = JSONObject.parseObject(taskInstance.getTaskJson(), TaskNode.class); + + // copy hdfs/minio file to local + copyHdfsToLocal(processDao, + taskInstance.getExecutePath(), + createProjectResFiles(taskNode), + logger); + + // get process instance according to tak instance + ProcessInstance processInstance = taskInstance.getProcessInstance(); + // get process define according to tak instance + ProcessDefinition processDefine = taskInstance.getProcessDefine(); + + // get tenant info + Tenant tenant = processDao.getTenantForProcess(processInstance.getTenantId(), + processDefine.getUserId()); + + if(tenant == null){ + logger.error("cannot find the tenant, process definition id:{}, user id:{}", + processDefine.getId(), + processDefine.getUserId()); + task.setExitStatusCode(Constants.EXIT_CODE_FAILURE); + }else{ + + // set task props + TaskProps taskProps = new TaskProps(taskNode.getParams(), + taskInstance.getExecutePath(), + processInstance.getScheduleTime(), + taskInstance.getName(), + taskInstance.getTaskType(), + taskInstance.getId(), + CommonUtils.getSystemEnvPath(), + tenant.getTenantCode(), + tenant.getQueue(), + taskInstance.getStartTime(), + getGlobalParamsMap(), + taskInstance.getDependency(), + processInstance.getCmdTypeIfComplement()); + // set task timeout + setTaskTimeout(taskProps, taskNode); + + taskProps.setTaskAppId(String.format("%s_%s_%s", + taskInstance.getProcessDefine().getId(), + taskInstance.getProcessInstance().getId(), + taskInstance.getId())); + + // custom logger + Logger taskLogger = 
LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, + taskInstance.getProcessDefine().getId(), + taskInstance.getProcessInstance().getId(), + taskInstance.getId())); + + task = TaskManager.newTask(taskInstance.getTaskType(), + taskProps, + taskLogger); + + // task init + task.init(); + + // task handle + task.handle(); + + // task result process + task.after(); + } + }catch (Exception e){ + logger.error("task scheduler failure", e); + task.setExitStatusCode(Constants.EXIT_CODE_FAILURE); + kill(); + } + + logger.info("task instance id : {},task final status : {}", + taskInstance.getId(), + task.getExitStatus()); + // update task instance state + processDao.changeTaskState(task.getExitStatus(), + new Date(), + taskInstance.getId()); + } + + /** + * get global paras map + * @return + */ + private Map getGlobalParamsMap() { + Map globalParamsMap = new HashMap<>(16); + + // global params string + String globalParamsStr = taskInstance.getProcessInstance().getGlobalParams(); + + if (globalParamsStr != null) { + List globalParamsList = JSONObject.parseArray(globalParamsStr, Property.class); + globalParamsMap.putAll(globalParamsList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue))); + } + return globalParamsMap; + } + + /** + * update task state according to task type + * @param taskType + */ + private void updateTaskState(String taskType) { + // update task status is running + if(taskType.equals(TaskType.SQL.name()) || + taskType.equals(TaskType.PROCEDURE.name())){ + processDao.changeTaskState(ExecutionStatus.RUNNING_EXEUTION, + taskInstance.getStartTime(), + taskInstance.getHost(), + null, + getTaskLogPath(), + taskInstance.getId()); + }else{ + processDao.changeTaskState(ExecutionStatus.RUNNING_EXEUTION, + taskInstance.getStartTime(), + taskInstance.getHost(), + taskInstance.getExecutePath(), + getTaskLogPath(), + taskInstance.getId()); + } + } + + /** + * get task log path + * @return + */ + private String 
getTaskLogPath() { + String baseLog = ((TaskLogDiscriminator) ((SiftingAppender) ((LoggerContext) LoggerFactory.getILoggerFactory()) + .getLogger("ROOT") + .getAppender("TASKLOGFILE")) + .getDiscriminator()).getLogBase(); + if (baseLog.startsWith(Constants.SINGLE_SLASH)){ + return baseLog + Constants.SINGLE_SLASH + + taskInstance.getProcessDefinitionId() + Constants.SINGLE_SLASH + + taskInstance.getProcessInstanceId() + Constants.SINGLE_SLASH + + taskInstance.getId() + ".log"; + } + return System.getProperty("user.dir") + Constants.SINGLE_SLASH + + baseLog + Constants.SINGLE_SLASH + + taskInstance.getProcessDefinitionId() + Constants.SINGLE_SLASH + + taskInstance.getProcessInstanceId() + Constants.SINGLE_SLASH + + taskInstance.getId() + ".log"; + } + + /** + * set task timeout + * @param taskProps + * @param taskNode + */ + private void setTaskTimeout(TaskProps taskProps, TaskNode taskNode) { + // the default timeout is the maximum value of the integer + taskProps.setTaskTimeout(Integer.MAX_VALUE); + TaskTimeoutParameter taskTimeoutParameter = taskNode.getTaskTimeoutParameter(); + if (taskTimeoutParameter.getEnable()){ + // get timeout strategy + taskProps.setTaskTimeoutStrategy(taskTimeoutParameter.getStrategy()); + switch (taskTimeoutParameter.getStrategy()){ + case WARN: + break; + case FAILED: + if (Integer.MAX_VALUE > taskTimeoutParameter.getInterval() * 60) { + taskProps.setTaskTimeout(taskTimeoutParameter.getInterval() * 60); + } + break; + case WARNFAILED: + if (Integer.MAX_VALUE > taskTimeoutParameter.getInterval() * 60) { + taskProps.setTaskTimeout(taskTimeoutParameter.getInterval() * 60); + } + break; + default: + logger.error("not support task timeout strategy: {}", taskTimeoutParameter.getStrategy()); + throw new IllegalArgumentException("not support task timeout strategy"); + + } + } + } + + + + + /** + * kill task + */ + public void kill(){ + if (task != null){ + try { + task.cancelApplication(true); + }catch (Exception e){ + 
logger.error(e.getMessage(),e); + } + } + } + + + /** + * create project resource files + */ + private List createProjectResFiles(TaskNode taskNode) throws Exception{ + + Set projectFiles = new HashSet<>(); + AbstractParameters baseParam = TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams()); + + if (baseParam != null) { + List projectResourceFiles = baseParam.getResourceFilesList(); + if (projectResourceFiles != null) { + projectFiles.addAll(projectResourceFiles); + } + } + + return new ArrayList<>(projectFiles); + } + + /** + * copy hdfs file to local + * + * @param processDao + * @param execLocalPath + * @param projectRes + * @param logger + */ + private void copyHdfsToLocal(ProcessDao processDao, String execLocalPath, List projectRes, Logger logger) throws IOException { + for (String res : projectRes) { + File resFile = new File(execLocalPath, res); + if (!resFile.exists()) { + try { + // query the tenant code of the resource according to the name of the resource + String tentnCode = processDao.queryTenantCodeByResName(res); + String resHdfsPath = HadoopUtils.getHdfsFilename(tentnCode, res); + + logger.info("get resource file from hdfs :{}", resHdfsPath); + HadoopUtils.getInstance().copyHdfsToLocal(resHdfsPath, execLocalPath + File.separator + res, false, true); + }catch (Exception e){ + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage()); + } + } else { + logger.info("file : {} exists ", resFile.getName()); + } + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java new file mode 100644 index 0000000000..5fdf61d76f --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java @@ -0,0 +1,569 @@ +/* + * Licensed to the 
Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.utils.LoggerUtils; +import org.apache.dolphinscheduler.server.utils.ProcessUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; + +import java.io.*; +import java.lang.reflect.Field; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * abstract command executor + */ +public abstract class AbstractCommandExecutor { + /** + * rules for extracting application ID + */ + protected static final Pattern APPLICATION_REGEX = 
Pattern.compile(Constants.APPLICATION_REGEX); + + /** + * process + */ + private Process process; + + /** + * log handler + */ + protected Consumer> logHandler; + + /** + * task dir + */ + protected final String taskDir; + + /** + * task appId + */ + protected final String taskAppId; + + /** + * task appId + */ + protected final int taskInstId; + + /** + * tenant code , execute task linux user + */ + protected final String tenantCode; + + /** + * env file + */ + protected final String envFile; + + /** + * start time + */ + protected final Date startTime; + + /** + * timeout + */ + protected int timeout; + + /** + * logger + */ + protected Logger logger; + + /** + * log list + */ + protected final List logBuffer; + + + public AbstractCommandExecutor(Consumer> logHandler, + String taskDir, String taskAppId,int taskInstId,String tenantCode, String envFile, + Date startTime, int timeout, Logger logger){ + this.logHandler = logHandler; + this.taskDir = taskDir; + this.taskAppId = taskAppId; + this.taskInstId = taskInstId; + this.tenantCode = tenantCode; + this.envFile = envFile; + this.startTime = startTime; + this.timeout = timeout; + this.logger = logger; + this.logBuffer = Collections.synchronizedList(new ArrayList<>()); + } + + /** + * task specific execution logic + * + * @param execCommand + * @param processDao + * @return + */ + public int run(String execCommand, ProcessDao processDao) { + int exitStatusCode; + + try { + if (StringUtils.isEmpty(execCommand)) { + exitStatusCode = 0; + return exitStatusCode; + } + + String commandFilePath = buildCommandFilePath(); + + // create command file if not exists + createCommandFileIfNotExists(execCommand, commandFilePath); + + //build process + buildProcess(commandFilePath); + + // parse process output + parseProcessOutput(process); + + // get process id + int pid = getProcessId(process); + + // task instance id + int taskInstId = Integer.parseInt(taskAppId.split("_")[2]); + + processDao.updatePidByTaskInstId(taskInstId, 
pid); + + logger.info("process start, process id is: {}", pid); + + // if timeout occurs, exit directly + long remainTime = getRemaintime(); + + // waiting for the run to finish + boolean status = process.waitFor(remainTime, TimeUnit.SECONDS); + + if (status) { + exitStatusCode = process.exitValue(); + logger.info("process has exited, work dir:{}, pid:{} ,exitStatusCode:{}", taskDir, pid,exitStatusCode); + //update process state to db + exitStatusCode = updateState(processDao, exitStatusCode, pid, taskInstId); + + } else { + TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); + if (taskInstance == null) { + logger.error("task instance id:{} not exist", taskInstId); + } else { + ProcessUtils.kill(taskInstance); + } + exitStatusCode = -1; + logger.warn("process timeout, work dir:{}, pid:{}", taskDir, pid); + } + + } catch (InterruptedException e) { + exitStatusCode = -1; + logger.error(String.format("interrupt exception: {}, task may be cancelled or killed",e.getMessage()), e); + throw new RuntimeException("interrupt exception. exitCode is : " + exitStatusCode); + } catch (Exception e) { + exitStatusCode = -1; + logger.error(e.getMessage(), e); + throw new RuntimeException("process error . 
exitCode is : " + exitStatusCode); + } + + return exitStatusCode; + } + + /** + * build process + * + * @param commandFile + * @throws IOException + */ + private void buildProcess(String commandFile) throws IOException { + //init process builder + ProcessBuilder processBuilder = new ProcessBuilder(); + // setting up a working directory + processBuilder.directory(new File(taskDir)); + // merge error information to standard output stream + processBuilder.redirectErrorStream(true); + // setting up user to run commands + processBuilder.command("sudo", "-u", tenantCode, commandType(), commandFile); + + process = processBuilder.start(); + + // print command + printCommand(processBuilder); + } + + /** + * update process state to db + * + * @param processDao + * @param exitStatusCode + * @param pid + * @param taskInstId + * @return + */ + private int updateState(ProcessDao processDao, int exitStatusCode, int pid, int taskInstId) { + //get yarn state by log + if (exitStatusCode != 0) { + TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); + logger.info("process id is {}", pid); + + List appIds = getAppLinks(taskInstance.getLogPath()); + if (appIds.size() > 0) { + String appUrl = String.join(Constants.COMMA, appIds); + logger.info("yarn log url:{}",appUrl); + processDao.updatePidByTaskInstId(taskInstId, pid, appUrl); + } + + // check if all operations are completed + if (!isSuccessOfYarnState(appIds)) { + exitStatusCode = -1; + } + } + return exitStatusCode; + } + + + /** + * cancel python task + */ + public void cancelApplication() throws Exception { + if (process == null) { + return; + } + + // clear log + clear(); + + int processId = getProcessId(process); + + logger.info("cancel process: {}", processId); + + // kill , waiting for completion + boolean killed = softKill(processId); + + if (!killed) { + // hard kill + hardKill(processId); + + // destory + process.destroy(); + + process = null; + } + } + + /** + * soft kill + * @param processId + * 
@return + * @throws InterruptedException + */ + private boolean softKill(int processId) { + + if (processId != 0 && process.isAlive()) { + try { + // sudo -u user command to run command + String cmd = String.format("sudo kill %d", processId); + + logger.info("soft kill task:{}, process id:{}, cmd:{}", taskAppId, processId, cmd); + + Runtime.getRuntime().exec(cmd); + } catch (IOException e) { + logger.info("kill attempt failed." + e.getMessage(), e); + } + } + + return process.isAlive(); + } + + /** + * hard kill + * @param processId + */ + private void hardKill(int processId) { + if (processId != 0 && process.isAlive()) { + try { + String cmd = String.format("sudo kill -9 %d", processId); + + logger.info("hard kill task:{}, process id:{}, cmd:{}", taskAppId, processId, cmd); + + Runtime.getRuntime().exec(cmd); + } catch (IOException e) { + logger.error("kill attempt failed." + e.getMessage(), e); + } + } + } + + /** + * print command + * @param processBuilder + */ + private void printCommand(ProcessBuilder processBuilder) { + String cmdStr; + + try { + cmdStr = ProcessUtils.buildCommandStr(processBuilder.command()); + logger.info("task run command:\n{}", cmdStr); + } catch (IOException e) { + logger.error(e.getMessage(), e); + } + } + + /** + * clear + */ + private void clear() { + if (!logBuffer.isEmpty()) { + // log handle + logHandler.accept(logBuffer); + + logBuffer.clear(); + } + } + + /** + * get the standard output of the process + */ + private void parseProcessOutput(Process process) { + String threadLoggerInfoName = String.format(LoggerUtils.TASK_LOGGER_THREAD_NAME + "-%s", taskAppId); + ExecutorService parseProcessOutputExecutorService = ThreadUtils.newDaemonSingleThreadExecutor(threadLoggerInfoName); + parseProcessOutputExecutorService.submit(new Runnable(){ + @Override + public void run() { + BufferedReader inReader = null; + + try { + inReader = new BufferedReader(new InputStreamReader(process.getInputStream())); + String line; + + long lastFlushTime = 
System.currentTimeMillis(); + + while ((line = inReader.readLine()) != null) { + logBuffer.add(line); + lastFlushTime = flush(lastFlushTime); + } + } catch (Exception e) { + logger.error(e.getMessage(),e); + } finally { + clear(); + close(inReader); + } + } + }); + parseProcessOutputExecutorService.shutdown(); + } + + public int getPid() { + return getProcessId(process); + } + + /** + * check yarn state + * + * @param appIds + * @return + */ + public boolean isSuccessOfYarnState(List appIds) { + + boolean result = true; + try { + for (String appId : appIds) { + while(true){ + ExecutionStatus applicationStatus = HadoopUtils.getInstance().getApplicationStatus(appId); + logger.info("appId:{}, final state:{}",appId,applicationStatus.name()); + if (applicationStatus.equals(ExecutionStatus.FAILURE) || + applicationStatus.equals(ExecutionStatus.KILL)) { + return false; + } + + if (applicationStatus.equals(ExecutionStatus.SUCCESS)){ + break; + } + Thread.sleep(Constants.SLEEP_TIME_MILLIS); + } + } + } catch (Exception e) { + logger.error(String.format("yarn applications: %s status failed : " + e.getMessage(), appIds.toString()),e); + result = false; + } + return result; + + } + + /** + * get app links + * @param fileName + * @return + */ + private List getAppLinks(String fileName) { + List logs = convertFile2List(fileName); + + List appIds = new ArrayList(); + /** + * analysis log,get submited yarn application id + */ + for (String log : logs) { + + String appId = findAppId(log); + if (StringUtils.isNotEmpty(appId) && !appIds.contains(appId)) { + logger.info("find app id: {}", appId); + appIds.add(appId); + } + } + return appIds; + } + + /** + * convert file to list + * @param filename + * @return + */ + private List convertFile2List(String filename) { + List lineList = new ArrayList(100); + File file=new File(filename); + + if (!file.exists()){ + return lineList; + } + + BufferedReader br = null; + try { + br = new BufferedReader(new InputStreamReader(new 
FileInputStream(filename), StandardCharsets.UTF_8)); + String line = null; + while ((line = br.readLine()) != null) { + lineList.add(line); + } + } catch (Exception e) { + logger.error(String.format("read file: %s failed : ",filename),e); + } finally { + if(br != null){ + try { + br.close(); + } catch (IOException e) { + logger.error(e.getMessage(),e); + } + } + + } + return lineList; + } + + /** + * find app id + * + * @return appid + */ + private String findAppId(String line) { + Matcher matcher = APPLICATION_REGEX.matcher(line); + + if (matcher.find() && checkFindApp(line)) { + return matcher.group(); + } + + return null; + } + + + /** + * get remain time(s) + * + * @return + */ + private long getRemaintime() { + long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000; + long remainTime = timeout - usedTime; + + if (remainTime < 0) { + throw new RuntimeException("task execution time out"); + } + + return remainTime; + } + + /** + * get process id + * + * @param process + * @return + */ + private int getProcessId(Process process) { + int processId = 0; + + try { + Field f = process.getClass().getDeclaredField(Constants.PID); + f.setAccessible(true); + + processId = f.getInt(process); + } catch (Throwable e) { + logger.error(e.getMessage(), e); + } + + return processId; + } + + /** + * when log buffer siz or flush time reach condition , then flush + * + * @param lastFlushTime last flush time + * @return + */ + private long flush(long lastFlushTime) { + long now = System.currentTimeMillis(); + + /** + * when log buffer siz or flush time reach condition , then flush + */ + if (logBuffer.size() >= Constants.defaultLogRowsNum || now - lastFlushTime > Constants.defaultLogFlushInterval) { + lastFlushTime = now; + /** log handle */ + logHandler.accept(logBuffer); + + logBuffer.clear(); + } + return lastFlushTime; + } + + /** + * close buffer reader + * + * @param inReader + */ + private void close(BufferedReader inReader) { + if (inReader != null) { + 
try { + inReader.close(); + } catch (IOException e) { + logger.error(e.getMessage(), e); + } + } + } + + + protected abstract String buildCommandFilePath(); + protected abstract String commandType(); + protected abstract boolean checkFindApp(String line); + protected abstract void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException; +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java new file mode 100644 index 0000000000..534bf40301 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java @@ -0,0 +1,213 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.TaskRecordStatus; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; +import org.apache.dolphinscheduler.common.task.mr.MapreduceParameters; +import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters; +import org.apache.dolphinscheduler.common.task.python.PythonParameters; +import org.apache.dolphinscheduler.common.task.shell.ShellParameters; +import org.apache.dolphinscheduler.common.task.spark.SparkParameters; +import org.apache.dolphinscheduler.common.task.sql.SqlParameters; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.TaskRecordDao; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; + +import java.util.List; +import java.util.Map; + +/** + * executive task + */ +public abstract class AbstractTask { + + /** + * task props + **/ + protected TaskProps taskProps; + + /** + * log record + */ + protected Logger logger; + + + /** + * cancel + */ + protected volatile boolean cancel = false; + + /** + * exit code + */ + protected volatile int exitStatusCode = -1; + + /** + * @param taskProps + * @param logger + */ + protected AbstractTask(TaskProps taskProps, Logger logger) { + this.taskProps = taskProps; + this.logger = logger; + } + + /** + * init task + */ + public void init() throws Exception { + } + + /** + * task handle + */ + public abstract void handle() throws Exception; + + + + public void cancelApplication(boolean status) throws Exception { + this.cancel = status; + } + + 
/** + * log process + */ + public void logHandle(List logs) { + // note that the "new line" is added here to facilitate log parsing + logger.info(" -> {}", String.join("\n\t", logs)); + } + + + /** + * exit code + */ + public int getExitStatusCode() { + return exitStatusCode; + } + + public void setExitStatusCode(int exitStatusCode) { + this.exitStatusCode = exitStatusCode; + } + + /** + * get task parameters + */ + public abstract AbstractParameters getParameters(); + + + /** + * result processing + */ + public void after(){ + if (getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){ + // task recor flat : if true , start up qianfan + if (TaskRecordDao.getTaskRecordFlag() + && TaskType.typeIsNormalTask(taskProps.getTaskType())){ + AbstractParameters params = (AbstractParameters) JSONUtils.parseObject(taskProps.getTaskParams(), getCurTaskParamsClass()); + + // replace placeholder + Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), + taskProps.getDefinedParams(), + params.getLocalParametersMap(), + taskProps.getCmdTypeIfComplement(), + taskProps.getScheduleTime()); + if (paramsMap != null && !paramsMap.isEmpty() + && paramsMap.containsKey("v_proc_date")){ + String vProcDate = paramsMap.get("v_proc_date").getValue(); + if (!StringUtils.isEmpty(vProcDate)){ + TaskRecordStatus taskRecordState = TaskRecordDao.getTaskRecordState(taskProps.getNodeName(), vProcDate); + logger.info("task record status : {}",taskRecordState); + if (taskRecordState == TaskRecordStatus.FAILURE){ + setExitStatusCode(Constants.EXIT_CODE_FAILURE); + } + } + } + } + + }else if (getExitStatusCode() == Constants.EXIT_CODE_KILL){ + setExitStatusCode(Constants.EXIT_CODE_KILL); + }else { + setExitStatusCode(Constants.EXIT_CODE_FAILURE); + } + } + + + + + /** + * get current task parameter class + * @return + */ + private Class getCurTaskParamsClass(){ + Class paramsClass = null; + // get task type + TaskType taskType = TaskType.valueOf(taskProps.getTaskType()); + switch (taskType){ + 
case SHELL: + paramsClass = ShellParameters.class; + break; + case SQL: + paramsClass = SqlParameters.class; + break; + case PROCEDURE: + paramsClass = ProcedureParameters.class; + break; + case MR: + paramsClass = MapreduceParameters.class; + break; + case SPARK: + paramsClass = SparkParameters.class; + break; + case FLINK: + paramsClass = FlinkParameters.class; + case PYTHON: + paramsClass = PythonParameters.class; + break; + default: + logger.error("not support this task type: {}", taskType); + throw new IllegalArgumentException("not support this task type"); + } + return paramsClass; + } + + /** + * get exit status according to exitCode + * @return + */ + public ExecutionStatus getExitStatus(){ + ExecutionStatus status; + switch (getExitStatusCode()){ + case Constants.EXIT_CODE_SUCCESS: + status = ExecutionStatus.SUCCESS; + break; + case Constants.EXIT_CODE_KILL: + status = ExecutionStatus.KILL; + break; + default: + status = ExecutionStatus.FAILURE; + break; + } + return status; + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java new file mode 100644 index 0000000000..7af1af5318 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task; + +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.utils.ProcessUtils; +import org.slf4j.Logger; + +import java.io.IOException; + +/** + * abstract yarn task + */ +public abstract class AbstractYarnTask extends AbstractTask { + + /** + * process instance + */ + + /** + * process task + */ + private ShellCommandExecutor shellCommandExecutor; + + /** + * process database access + */ + protected ProcessDao processDao; + + /** + * @param taskProps + * @param logger + * @throws IOException + */ + public AbstractYarnTask(TaskProps taskProps, Logger logger) { + super(taskProps, logger); + this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); + this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, + taskProps.getTaskDir(), + taskProps.getTaskAppId(), + taskProps.getTaskInstId(), + taskProps.getTenantCode(), + taskProps.getEnvFile(), + taskProps.getTaskStartTime(), + taskProps.getTaskTimeout(), + logger); + } + + @Override + public void handle() throws Exception { + try { + // construct process + exitStatusCode = shellCommandExecutor.run(buildCommand(), processDao); + } catch (Exception e) { + logger.error("yarn process failure", e); + exitStatusCode = -1; + } + } + + @Override + public void cancelApplication(boolean status) throws Exception { + cancel = true; + // cancel process + 
shellCommandExecutor.cancelApplication(); + TaskInstance taskInstance = processDao.findTaskInstanceById(taskProps.getTaskInstId()); + if (status && taskInstance != null){ + ProcessUtils.killYarnJob(taskInstance); + } + } + + /** + * create command + */ + protected abstract String buildCommand() throws Exception; +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java new file mode 100644 index 0000000000..af4b3512f3 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java @@ -0,0 +1,158 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Date; +import java.util.List; +import java.util.function.Consumer; + +/** + * python command executor + */ +public class PythonCommandExecutor extends AbstractCommandExecutor { + + private static final Logger logger = LoggerFactory.getLogger(PythonCommandExecutor.class); + + public static final String PYTHON = "python"; + + + + public PythonCommandExecutor(Consumer> logHandler, + String taskDir, + String taskAppId, + int taskInstId, + String tenantCode, + String envFile, + Date startTime, + int timeout, + Logger logger) { + super(logHandler,taskDir,taskAppId,taskInstId,tenantCode, envFile, startTime, timeout, logger); + } + + + /** + * build command file path + * + * @return + */ + @Override + protected String buildCommandFilePath() { + return String.format("%s/py_%s.command", taskDir, taskAppId); + } + + /** + * create command file if not exists + * + * @param commandFile + * @throws IOException + */ + @Override + protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { + logger.info("tenantCode :{}, task dir:{}", tenantCode, taskDir); + + if (!Files.exists(Paths.get(commandFile))) { + logger.info("generate command file:{}", commandFile); + + StringBuilder sb = new StringBuilder(); + sb.append("#-*- encoding=utf8 -*-\n"); + + sb.append("\n\n"); + sb.append(execCommand); + logger.info(sb.toString()); + + // write data to file + FileUtils.writeStringToFile(new File(commandFile), + sb.toString(), + StandardCharsets.UTF_8); + } + } + + @Override + protected String commandType() { + String pythonHome = 
getPythonHome(envFile); + if (StringUtils.isEmpty(pythonHome)){ + return PYTHON; + } + return pythonHome; + } + + @Override + protected boolean checkFindApp(String line) { + return true; + } + + + /** + * get the absolute path of the Python command + * note : + * common.properties + * PYTHON_HOME configured under common.properties is Python absolute path, not PYTHON_HOME itself + * + * for example : + * your PYTHON_HOME is /opt/python3.7/ + * you must set PYTHON_HOME to /opt/python3.7/python under common.properties + * escheduler.env.path file. + * + * @param envPath + * @return + */ + private static String getPythonHome(String envPath){ + BufferedReader br = null; + StringBuilder sb = new StringBuilder(); + try { + br = new BufferedReader(new InputStreamReader(new FileInputStream(envPath))); + String line; + while ((line = br.readLine()) != null){ + if (line.contains(Constants.PYTHON_HOME)){ + sb.append(line); + break; + } + } + String result = sb.toString(); + if (org.apache.commons.lang.StringUtils.isEmpty(result)){ + return null; + } + String[] arrs = result.split(Constants.EQUAL_SIGN); + if (arrs.length == 2){ + return arrs[1]; + } + + }catch (IOException e){ + logger.error("read file failure",e); + }finally { + try { + if (br != null){ + br.close(); + } + } catch (IOException e) { + logger.error(e.getMessage(),e); + } + } + return null; + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/ShellCommandExecutor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/ShellCommandExecutor.java new file mode 100644 index 0000000000..0b7493b59e --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/ShellCommandExecutor.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task; + +import org.apache.commons.io.FileUtils; +import org.slf4j.Logger; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Date; +import java.util.List; +import java.util.function.Consumer; + +/** + * shell command executor + */ +public class ShellCommandExecutor extends AbstractCommandExecutor { + + public static final String SH = "sh"; + + + public ShellCommandExecutor(Consumer> logHandler, + String taskDir, + String taskAppId, + int taskInstId, + String tenantCode, + String envFile, + Date startTime, + int timeout, + Logger logger) { + super(logHandler,taskDir,taskAppId,taskInstId,tenantCode, envFile, startTime, timeout, logger); + } + + + @Override + protected String buildCommandFilePath() { + // command file + return String.format("%s/%s.command", taskDir, taskAppId); + } + + @Override + protected String commandType() { + return SH; + } + + @Override + protected boolean checkFindApp(String line) { + return line.contains(taskAppId); + } + + @Override + protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { + logger.info("tenantCode user:{}, task dir:{}", 
tenantCode, taskAppId); + + // create if non existence + if (!Files.exists(Paths.get(commandFile))) { + logger.info("create command file:{}", commandFile); + + StringBuilder sb = new StringBuilder(); + sb.append("#!/bin/sh\n"); + sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); + sb.append("cd $BASEDIR\n"); + + if (envFile != null) { + sb.append("source " + envFile + "\n"); + } + + sb.append("\n\n"); + sb.append(execCommand); + logger.info("command : {}",sb.toString()); + + // write data to file + FileUtils.writeStringToFile(new File(commandFile), sb.toString(), + Charset.forName("UTF-8")); + } + } + + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java new file mode 100644 index 0000000000..e308a906a6 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task; + + +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.server.worker.task.dependent.DependentTask; +import org.apache.dolphinscheduler.server.worker.task.flink.FlinkTask; +import org.apache.dolphinscheduler.server.worker.task.http.HttpTask; +import org.apache.dolphinscheduler.server.worker.task.mr.MapReduceTask; +import org.apache.dolphinscheduler.server.worker.task.processdure.ProcedureTask; +import org.apache.dolphinscheduler.server.worker.task.python.PythonTask; +import org.apache.dolphinscheduler.server.worker.task.shell.ShellTask; +import org.apache.dolphinscheduler.server.worker.task.spark.SparkTask; +import org.apache.dolphinscheduler.server.worker.task.sql.SqlTask; +import org.apache.commons.lang3.EnumUtils; +import org.slf4j.Logger; + +/** + * task manager + */ +public class TaskManager { + + + /** + * create new task + * @param taskType + * @param props + * @param logger + * @return + * @throws IllegalArgumentException + */ + public static AbstractTask newTask(String taskType, TaskProps props, Logger logger) + throws IllegalArgumentException { + switch (EnumUtils.getEnum(TaskType.class,taskType)) { + case SHELL: + return new ShellTask(props, logger); + case PROCEDURE: + return new ProcedureTask(props, logger); + case SQL: + return new SqlTask(props, logger); + case MR: + return new MapReduceTask(props, logger); + case SPARK: + return new SparkTask(props, logger); + case FLINK: + return new FlinkTask(props, logger); + case PYTHON: + return new PythonTask(props, logger); + case DEPENDENT: + return new DependentTask(props, logger); + case HTTP: + return new HttpTask(props, logger); + default: + logger.error("unsupport task type: {}", taskType); + throw new IllegalArgumentException("not support task type"); + } + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskProps.java
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskProps.java new file mode 100644 index 0000000000..340f35d0be --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskProps.java @@ -0,0 +1,290 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task; + +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.DataType; +import org.apache.dolphinscheduler.common.enums.Direct; +import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; +import org.apache.dolphinscheduler.common.process.Property; + +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +/** + * task props + */ +public class TaskProps { + + /** + * task node name + **/ + private String nodeName; + + /** + * task instance id + **/ + private int taskInstId; + + /** + * tenant code , execute task linux user + **/ + private String tenantCode; + + private String taskType; + + /** + * task parameters + **/ + private String taskParams; + + /** + * task dir + **/ + private String taskDir; + + /** + * queue + **/ + private String queue; + + /** + * env file + **/ + private String envFile; + + /** + * defined params + **/ + private Map definedParams; + + /** + * task path + */ + private String taskAppId; + + /** + * task start time + */ + private Date taskStartTime; + + /** + * task timeout + */ + private int taskTimeout; + + /** + * task timeout strategy + */ + private TaskTimeoutStrategy taskTimeoutStrategy; + /** + * task dependence + */ + private String dependence; + + /** + * schedule time + * @return + */ + private Date scheduleTime; + + /** + * command type is complement + */ + private CommandType cmdTypeIfComplement; + + + public TaskProps(){} + public TaskProps(String taskParams, + String taskDir, + Date scheduleTime, + String nodeName, + String taskType, + int taskInstId, + String envFile, + String tenantCode, + String queue, + Date taskStartTime, + Map definedParams, + String dependence, + CommandType cmdTypeIfComplement){ + this.taskParams = taskParams; + this.taskDir = taskDir; + this.scheduleTime = scheduleTime; + this.nodeName = nodeName; + this.taskType = taskType; + 
this.taskInstId = taskInstId; + this.envFile = envFile; + this.tenantCode = tenantCode; + this.queue = queue; + this.taskStartTime = taskStartTime; + this.definedParams = definedParams; + this.dependence = dependence; + this.cmdTypeIfComplement = cmdTypeIfComplement; + + } + + public String getTenantCode() { + return tenantCode; + } + + public void setTenantCode(String tenantCode) { + this.tenantCode = tenantCode; + } + + public String getTaskParams() { + return taskParams; + } + + public void setTaskParams(String taskParams) { + this.taskParams = taskParams; + } + + public String getTaskDir() { + return taskDir; + } + + public void setTaskDir(String taskDir) { + this.taskDir = taskDir; + } + + public Map getDefinedParams() { + return definedParams; + } + + public void setDefinedParams(Map definedParams) { + this.definedParams = definedParams; + } + + public String getEnvFile() { + return envFile; + } + + public void setEnvFile(String envFile) { + this.envFile = envFile; + } + + + public String getNodeName() { + return nodeName; + } + + public void setNodeName(String nodeName) { + this.nodeName = nodeName; + } + + public int getTaskInstId() { + return taskInstId; + } + + public void setTaskInstId(int taskInstId) { + this.taskInstId = taskInstId; + } + + public String getQueue() { + return queue; + } + + public void setQueue(String queue) { + this.queue = queue; + } + + + public String getTaskAppId() { + return taskAppId; + } + + public void setTaskAppId(String taskAppId) { + this.taskAppId = taskAppId; + } + + public Date getTaskStartTime() { + return taskStartTime; + } + + public void setTaskStartTime(Date taskStartTime) { + this.taskStartTime = taskStartTime; + } + + public int getTaskTimeout() { + return taskTimeout; + } + + public void setTaskTimeout(int taskTimeout) { + this.taskTimeout = taskTimeout; + } + + public TaskTimeoutStrategy getTaskTimeoutStrategy() { + return taskTimeoutStrategy; + } + + public void setTaskTimeoutStrategy(TaskTimeoutStrategy 
taskTimeoutStrategy) { + this.taskTimeoutStrategy = taskTimeoutStrategy; + } + + public String getTaskType() { + return taskType; + } + + public void setTaskType(String taskType) { + this.taskType = taskType; + } + + public String getDependence() { + return dependence; + } + + public void setDependence(String dependence) { + this.dependence = dependence; + } + + public Date getScheduleTime() { + return scheduleTime; + } + + public void setScheduleTime(Date scheduleTime) { + this.scheduleTime = scheduleTime; + } + + public CommandType getCmdTypeIfComplement() { + return cmdTypeIfComplement; + } + + public void setCmdTypeIfComplement(CommandType cmdTypeIfComplement) { + this.cmdTypeIfComplement = cmdTypeIfComplement; + } + + /** + * get parameters map + * @return + */ + public Map getUserDefParamsMap() { + if (definedParams != null) { + Map userDefParamsMaps = new HashMap<>(); + Iterator> iter = definedParams.entrySet().iterator(); + while (iter.hasNext()){ + Map.Entry en = iter.next(); + Property property = new Property(en.getKey(), Direct.IN, DataType.VARCHAR , en.getValue()); + userDefParamsMaps.put(property.getProp(),property); + } + return userDefParamsMaps; + } + return null; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java new file mode 100644 index 0000000000..798311aba8 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java @@ -0,0 +1,211 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task.dependent; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.DependentRelation; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.model.DateInterval; +import org.apache.dolphinscheduler.common.model.DependentItem; +import org.apache.dolphinscheduler.common.utils.DependentUtils; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +/** + * dependent item execute + */ +public class DependentExecute { + /** + * process dao + */ + private static final ProcessDao processDao = DaoFactory.getDaoInstance(ProcessDao.class); + + private List dependItemList; + private DependentRelation relation; + + private DependResult modelDependResult = DependResult.WAITING; + private Map dependResultMap = new HashMap<>(); + + private Logger logger = LoggerFactory.getLogger(DependentExecute.class); + + public DependentExecute(List itemList, DependentRelation relation){ + this.dependItemList = itemList; + this.relation = 
relation; + } + + /** + * get dependent item for one dependent item + * @param dependentItem + * @return + */ + public DependResult getDependentResultForItem(DependentItem dependentItem, Date currentTime){ + List dateIntervals = DependentUtils.getDateIntervalList(currentTime, dependentItem.getDateValue()); + return calculateResultForTasks(dependentItem, dateIntervals ); + } + + /** + * calculate dependent result for one dependent item. + * @param dependentItem + * @param dateIntervals + * @return + */ + private DependResult calculateResultForTasks(DependentItem dependentItem, + List dateIntervals) { + DependResult result = DependResult.FAILED; + for(DateInterval dateInterval : dateIntervals){ + ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionId(), + dateInterval); + if(processInstance == null){ + logger.error("cannot find the right process instance: definition id:{}, start:{}, end:{}", + dependentItem.getDefinitionId(), dateInterval.getStartTime(), dateInterval.getEndTime() ); + return DependResult.FAILED; + } + if(dependentItem.getDepTasks().equals(Constants.DEPENDENT_ALL)){ + result = getDependResultByState(processInstance.getState()); + }else{ + TaskInstance taskInstance = null; + List taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId()); + + for(TaskInstance task : taskInstanceList){ + if(task.getName().equals(dependentItem.getDepTasks())){ + taskInstance = task; + break; + } + } + if(taskInstance == null){ + // cannot find task in the process instance + // maybe because process instance is running or failed. + result = getDependResultByState(processInstance.getState()); + }else{ + result = getDependResultByState(taskInstance.getState()); + } + } + if(result != DependResult.SUCCESS){ + break; + } + } + return result; + } + + /** + * find the last one process instance that : + * 1. manual run and finish between the interval + * 2. 
schedule run and schedule time between the interval + * @param definitionId + * @param dateInterval + * @return + */ + private ProcessInstance findLastProcessInterval(int definitionId, DateInterval dateInterval) { + + ProcessInstance runningProcess = processDao.findLastRunningProcess(definitionId, dateInterval); + if(runningProcess != null){ + return runningProcess; + } + + ProcessInstance lastSchedulerProcess = processDao.findLastSchedulerProcessInterval( + definitionId, dateInterval + ); + + ProcessInstance lastManualProcess = processDao.findLastManualProcessInterval( + definitionId, dateInterval + ); + + if(lastManualProcess ==null){ + return lastSchedulerProcess; + } + if(lastSchedulerProcess == null){ + return lastManualProcess; + } + + return (lastManualProcess.getEndTime().after(lastSchedulerProcess.getEndTime()))? + lastManualProcess : lastSchedulerProcess; + } + + /** + * get dependent result by task/process instance state + * @param state + * @return + */ + private DependResult getDependResultByState(ExecutionStatus state) { + + if(state.typeIsRunning() || state == ExecutionStatus.SUBMITTED_SUCCESS || state == ExecutionStatus.WAITTING_THREAD){ + return DependResult.WAITING; + }else if(state.typeIsSuccess()){ + return DependResult.SUCCESS; + }else{ + return DependResult.FAILED; + } + } + + /** + * judge depend item finished + * @return + */ + public boolean finish(Date currentTime){ + if(modelDependResult == DependResult.WAITING){ + modelDependResult = getModelDependResult(currentTime); + return false; + } + return true; + } + + /** + * get model depend result + * @return + */ + public DependResult getModelDependResult(Date currentTime){ + + List dependResultList = new ArrayList<>(); + + for(DependentItem dependentItem : dependItemList){ + DependResult dependResult = getDependResultForItem(dependentItem, currentTime); + if(dependResult != DependResult.WAITING){ + dependResultMap.put(dependentItem.getKey(), dependResult); + } + 
dependResultList.add(dependResult); + } + modelDependResult = DependentUtils.getDependResultForRelation( + this.relation, dependResultList + ); + return modelDependResult; + } + + /** + * get dependent item result + * @param item + * @return + */ + public DependResult getDependResultForItem(DependentItem item, Date currentTime){ + String key = item.getKey(); + if(dependResultMap.containsKey(key)){ + return dependResultMap.get(key); + } + return getDependentResultForItem(item, currentTime); + } + + public Map getDependResultMap(){ + return dependResultMap; + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java new file mode 100644 index 0000000000..8510265869 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java @@ -0,0 +1,172 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.dependent; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.model.DependentTaskModel; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.utils.DependentUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.slf4j.Logger; + +import java.util.*; + +import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT; + +public class DependentTask extends AbstractTask { + + private List dependentTaskList = new ArrayList<>(); + + /** + * depend item result map + * save the result to log file + */ + private Map dependResultMap = new HashMap<>(); + + private DependentParameters dependentParameters; + + private Date dependentDate; + + private ProcessDao processDao; + + public DependentTask(TaskProps props, Logger logger) { + super(props, logger); + } + + @Override + public void init(){ + logger.info("dependent task initialize"); + + this.dependentParameters = JSONUtils.parseObject(this.taskProps.getDependence(), + DependentParameters.class); + + for(DependentTaskModel taskModel : dependentParameters.getDependTaskList()){ + this.dependentTaskList.add(new DependentExecute( + taskModel.getDependItemList(), taskModel.getRelation())); + } + + this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); + + 
if(taskProps.getScheduleTime() != null){ + this.dependentDate = taskProps.getScheduleTime(); + }else{ + this.dependentDate = taskProps.getTaskStartTime(); + } + + } + + @Override + public void handle(){ + // set the name of the current thread + String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskProps.getTaskAppId()); + Thread.currentThread().setName(threadLoggerInfoName); + + try{ + TaskInstance taskInstance = null; + while(Stopper.isRunning()){ + taskInstance = processDao.findTaskInstanceById(this.taskProps.getTaskInstId()); + + if(taskInstance == null){ + exitStatusCode = -1; + break; + } + + if(taskInstance.getState() == ExecutionStatus.KILL){ + this.cancel = true; + } + + if(this.cancel || allDependentTaskFinish()){ + break; + } + + Thread.sleep(Constants.SLEEP_TIME_MILLIS); + } + + if(cancel){ + exitStatusCode = Constants.EXIT_CODE_KILL; + }else{ + DependResult result = getTaskDependResult(); + exitStatusCode = (result == DependResult.SUCCESS) ? + Constants.EXIT_CODE_SUCCESS : Constants.EXIT_CODE_FAILURE; + } + }catch (Exception e){ + logger.error(e.getMessage(),e); + exitStatusCode = -1; + } + } + + /** + * get dependent result + * @return + */ + private DependResult getTaskDependResult(){ + List dependResultList = new ArrayList<>(); + for(DependentExecute dependentExecute : dependentTaskList){ + DependResult dependResult = dependentExecute.getModelDependResult(dependentDate); + dependResultList.add(dependResult); + } + DependResult result = DependentUtils.getDependResultForRelation( + this.dependentParameters.getRelation(), dependResultList + ); + return result; + } + + /** + * judge all dependent tasks finish + * @return + */ + private boolean allDependentTaskFinish(){ + boolean finish = true; + for(DependentExecute dependentExecute : dependentTaskList){ + Map resultMap = dependentExecute.getDependResultMap(); + Set keySet = resultMap.keySet(); + for(String key : keySet){ + if(!dependResultMap.containsKey(key)){ + dependResultMap.put(key, 
resultMap.get(key)); + //save depend result to log + logger.info("dependent item complete {} {},{}", + DEPENDENT_SPLIT, key, resultMap.get(key).toString()); + } + } + if(!dependentExecute.finish(dependentDate)){ + finish = false; + } + } + return finish; + } + + + @Override + public void cancelApplication(boolean cancelApplication) throws Exception { + // cancel process + this.cancel = true; + } + + @Override + public AbstractParameters getParameters() { + return null; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java new file mode 100644 index 0000000000..de50c52ed6 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.flink; + +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.server.utils.FlinkArgsUtils; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * flink task + */ +public class FlinkTask extends AbstractYarnTask { + + /** + * flink command + */ + private static final String FLINK_COMMAND = "flink"; + private static final String FLINK_RUN = "run"; + + /** + * flink parameters + */ + private FlinkParameters flinkParameters; + + public FlinkTask(TaskProps props, Logger logger) { + super(props, logger); + } + + @Override + public void init() { + + logger.info("flink task params {}", taskProps.getTaskParams()); + + flinkParameters = JSONUtils.parseObject(taskProps.getTaskParams(), FlinkParameters.class); + + if (!flinkParameters.checkParameters()) { + throw new RuntimeException("flink task params is not valid"); + } + flinkParameters.setQueue(taskProps.getQueue()); + + if (StringUtils.isNotEmpty(flinkParameters.getMainArgs())) { + String args = flinkParameters.getMainArgs(); + // get process instance by task instance id + ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); + + /** + * combining local and global parameters + */ + Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), + 
taskProps.getDefinedParams(), + flinkParameters.getLocalParametersMap(), + processInstance.getCmdTypeIfComplement(), + processInstance.getScheduleTime()); + + logger.info("param Map : {}", paramsMap); + if (paramsMap != null ){ + + args = ParameterUtils.convertParameterPlaceholders(args, ParamUtils.convert(paramsMap)); + logger.info("param args : {}", args); + } + flinkParameters.setMainArgs(args); + } + } + + /** + * create command + * @return + */ + @Override + protected String buildCommand() { + List args = new ArrayList<>(); + + args.add(FLINK_COMMAND); + args.add(FLINK_RUN); + logger.info("flink task args : {}", args); + // other parameters + args.addAll(FlinkArgsUtils.buildArgs(flinkParameters)); + + String command = ParameterUtils + .convertParameterPlaceholders(String.join(" ", args), taskProps.getDefinedParams()); + + logger.info("flink task command : {}", command); + + return command; + } + + @Override + public AbstractParameters getParameters() { + return flinkParameters; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java new file mode 100644 index 0000000000..47f6f83158 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java @@ -0,0 +1,270 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task.http; + + +import org.apache.dolphinscheduler.common.enums.HttpMethod; +import org.apache.dolphinscheduler.common.enums.HttpParametersType; +import org.apache.dolphinscheduler.common.process.HttpProperty; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.http.HttpParameters; +import org.apache.dolphinscheduler.common.utils.Bytes; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import com.alibaba.fastjson.JSONObject; +import org.apache.commons.io.Charsets; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpEntity; +import org.apache.http.ParseException; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpUriRequest; +import org.apache.http.client.methods.RequestBuilder; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import 
org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * http task + */ +public class HttpTask extends AbstractTask { + + private HttpParameters httpParameters; + + /** + * process database access + */ + private ProcessDao processDao; + + /** + * Convert mill seconds to second unit + */ + protected static final int MAX_CONNECTION_MILLISECONDS = 60000; + + protected static final String APPLICATION_JSON = "application/json"; + + protected String output; + + + public HttpTask(TaskProps props, Logger logger) { + super(props, logger); + this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); + } + + @Override + public void init() { + logger.info("http task params {}", taskProps.getTaskParams()); + this.httpParameters = JSONObject.parseObject(taskProps.getTaskParams(), HttpParameters.class); + + if (!httpParameters.checkParameters()) { + throw new RuntimeException("http task params is not valid"); + } + } + + @Override + public void handle() throws Exception { + String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskProps.getTaskAppId()); + Thread.currentThread().setName(threadLoggerInfoName); + + long startTime = System.currentTimeMillis(); + String statusCode = null; + String body = null; + try(CloseableHttpClient client = createHttpClient()) { + try(CloseableHttpResponse response = sendRequest(client)) { + statusCode = String.valueOf(getStatusCode(response)); + body = getResponseBody(response); + exitStatusCode = validResponse(body, statusCode); + long costTime = System.currentTimeMillis() - startTime; + logger.info("startTime: {}, httpUrl: {}, httpMethod: {}, costTime : {}Millisecond, statusCode : {}, body : {}, log : {}", + DateUtils.format2Readable(startTime), httpParameters.getUrl(),httpParameters.getHttpMethod(), costTime, 
statusCode, body, output); + }catch (Exception e) { + appendMessage(e.toString()); + exitStatusCode = -1; + logger.error("httpUrl[" + httpParameters.getUrl() + "] connection failed:"+output, e); + } + } catch (Exception e) { + appendMessage(e.toString()); + exitStatusCode = -1; + logger.error("httpUrl[" + httpParameters.getUrl() + "] connection failed:"+output, e); + } + } + + protected CloseableHttpResponse sendRequest(CloseableHttpClient client) throws IOException { + RequestBuilder builder = createRequestBuilder(); + ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); + + Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), + taskProps.getDefinedParams(), + httpParameters.getLocalParametersMap(), + processInstance.getCmdTypeIfComplement(), + processInstance.getScheduleTime()); + List httpPropertyList = new ArrayList<>(); + if(httpParameters.getHttpParams() != null && httpParameters.getHttpParams().size() > 0){ + for (HttpProperty httpProperty: httpParameters.getHttpParams()) { + String jsonObject = JSONObject.toJSONString(httpProperty); + String params = ParameterUtils.convertParameterPlaceholders(jsonObject,ParamUtils.convert(paramsMap)); + logger.info("http request params:{}",params); + httpPropertyList.add(JSONObject.parseObject(params,HttpProperty.class)); + } + } + addRequestParams(builder,httpPropertyList); + HttpUriRequest request = builder.setUri(httpParameters.getUrl()).build(); + setHeaders(request,httpPropertyList); + return client.execute(request); + } + + protected String getResponseBody(CloseableHttpResponse httpResponse) throws ParseException, IOException { + if (httpResponse == null) { + return null; + } + HttpEntity entity = httpResponse.getEntity(); + if (entity == null) { + return null; + } + String webPage = EntityUtils.toString(entity, Bytes.UTF8_ENCODING); + return webPage; + } + + protected int getStatusCode(CloseableHttpResponse httpResponse) { + int status = 
httpResponse.getStatusLine().getStatusCode(); + return status; + } + + protected int validResponse(String body, String statusCode){ + int exitStatusCode = 0; + switch (httpParameters.getHttpCheckCondition()) { + case BODY_CONTAINS: + if (StringUtils.isEmpty(body) || !body.contains(httpParameters.getCondition())) { + appendMessage(httpParameters.getUrl() + " doesn contain " + + httpParameters.getCondition()); + exitStatusCode = -1; + } + break; + case BODY_NOT_CONTAINS: + if (StringUtils.isEmpty(body) || body.contains(httpParameters.getCondition())) { + appendMessage(httpParameters.getUrl() + " contains " + + httpParameters.getCondition()); + exitStatusCode = -1; + } + break; + case STATUS_CODE_CUSTOM: + if (!statusCode.equals(httpParameters.getCondition())) { + appendMessage(httpParameters.getUrl() + " statuscode: " + statusCode + ", Must be: " + httpParameters.getCondition()); + exitStatusCode = -1; + } + break; + default: + if (!"200".equals(statusCode)) { + appendMessage(httpParameters.getUrl() + " statuscode: " + statusCode + ", Must be: 200"); + exitStatusCode = -1; + } + break; + } + return exitStatusCode; + } + + public String getOutput() { + return output; + } + + protected void appendMessage(String message) { + if (output == null) { + output = ""; + } + if (message != null && !message.trim().isEmpty()) { + output += message; + } + } + + protected void addRequestParams(RequestBuilder builder,List httpPropertyList) { + if(httpPropertyList != null && httpPropertyList.size() > 0){ + JSONObject jsonParam = new JSONObject(); + for (HttpProperty property: httpPropertyList){ + if(property.getHttpParametersType() != null){ + if (property.getHttpParametersType().equals(HttpParametersType.PARAMETER)){ + builder.addParameter(property.getProp(), property.getValue()); + }else if(property.getHttpParametersType().equals(HttpParametersType.BODY)){ + jsonParam.put(property.getProp(), property.getValue()); + } + } + } + StringEntity postingString = new 
StringEntity(jsonParam.toString(), Charsets.UTF_8); + postingString.setContentEncoding(Bytes.UTF8_ENCODING); + postingString.setContentType(APPLICATION_JSON); + builder.setEntity(postingString); + } + } + + protected void setHeaders(HttpUriRequest request,List httpPropertyList) { + if(httpPropertyList != null && httpPropertyList.size() > 0){ + for (HttpProperty property: httpPropertyList){ + if(property.getHttpParametersType() != null) { + if (property.getHttpParametersType().equals(HttpParametersType.HEADERS)) { + request.addHeader(property.getProp(), property.getValue()); + } + } + } + } + } + + protected CloseableHttpClient createHttpClient() { + final RequestConfig requestConfig = requestConfig(); + HttpClientBuilder httpClientBuilder; + httpClientBuilder = HttpClients.custom().setDefaultRequestConfig(requestConfig); + return httpClientBuilder.build(); + } + + private RequestConfig requestConfig() { + return RequestConfig.custom().setSocketTimeout(MAX_CONNECTION_MILLISECONDS).setConnectTimeout(MAX_CONNECTION_MILLISECONDS).build(); + } + + protected RequestBuilder createRequestBuilder() { + if (httpParameters.getHttpMethod().equals(HttpMethod.GET)) { + return RequestBuilder.get(); + } else if (httpParameters.getHttpMethod().equals(HttpMethod.POST)) { + return RequestBuilder.post(); + } else if (httpParameters.getHttpMethod().equals(HttpMethod.HEAD)) { + return RequestBuilder.head(); + } else if (httpParameters.getHttpMethod().equals(HttpMethod.PUT)) { + return RequestBuilder.put(); + } else if (httpParameters.getHttpMethod().equals(HttpMethod.DELETE)) { + return RequestBuilder.delete(); + } else { + return null; + } + } + + @Override + public AbstractParameters getParameters() { + return this.httpParameters; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java new file mode 100644 
index 0000000000..ec61643523 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java @@ -0,0 +1,144 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task.mr; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ProgramType; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.mr.MapreduceParameters; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * mapreduce task + */ +public class MapReduceTask extends AbstractYarnTask { + + + /** + * mapreduce parameters + */ + private MapreduceParameters 
mapreduceParameters; + + /** + * @param props + * @param logger + */ + public MapReduceTask(TaskProps props, Logger logger) { + super(props, logger); + } + + @Override + public void init() { + + logger.info("mapreduce task params {}", taskProps.getTaskParams()); + + this.mapreduceParameters = JSONUtils.parseObject(taskProps.getTaskParams(), MapreduceParameters.class); + + // check parameters + if (!mapreduceParameters.checkParameters()) { + throw new RuntimeException("mapreduce task params is not valid"); + } + + mapreduceParameters.setQueue(taskProps.getQueue()); + + // replace placeholder + Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), + taskProps.getDefinedParams(), + mapreduceParameters.getLocalParametersMap(), + taskProps.getCmdTypeIfComplement(), + taskProps.getScheduleTime()); + if (paramsMap != null){ + String args = ParameterUtils.convertParameterPlaceholders(mapreduceParameters.getMainArgs(), ParamUtils.convert(paramsMap)); + mapreduceParameters.setMainArgs(args); + if(mapreduceParameters.getProgramType() != null && mapreduceParameters.getProgramType() == ProgramType.PYTHON){ + String others = ParameterUtils.convertParameterPlaceholders(mapreduceParameters.getOthers(), ParamUtils.convert(paramsMap)); + mapreduceParameters.setOthers(others); + } + } + } + + @Override + protected String buildCommand() throws Exception { + List parameterList = buildParameters(mapreduceParameters); + + String command = ParameterUtils.convertParameterPlaceholders(String.join(" ", parameterList), + taskProps.getDefinedParams()); + logger.info("mapreduce task command: {}", command); + + return command; + } + + @Override + public AbstractParameters getParameters() { + return mapreduceParameters; + } + + + private List buildParameters(MapreduceParameters mapreduceParameters){ + + List result = new ArrayList<>(); + + result.add(Constants.HADOOP); + + // main jar + if(mapreduceParameters.getMainJar()!= null){ + result.add(Constants.JAR); + 
result.add(mapreduceParameters.getMainJar().getRes()); + } + + // main class + if(mapreduceParameters.getProgramType() !=null ){ + if(mapreduceParameters.getProgramType()!= ProgramType.PYTHON){ + if(StringUtils.isNotEmpty(mapreduceParameters.getMainClass())){ + result.add(mapreduceParameters.getMainClass()); + } + } + } + + // others + if (StringUtils.isNotEmpty(mapreduceParameters.getOthers())) { + String others = mapreduceParameters.getOthers(); + if(!others.contains(Constants.MR_QUEUE)){ + if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) { + result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue())); + } + } + result.add(mapreduceParameters.getOthers()); + }else if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) { + result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue())); + + } + + // command args + if(StringUtils.isNotEmpty(mapreduceParameters.getMainArgs())){ + result.add(mapreduceParameters.getMainArgs()); + } + return result; + } +} + diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java new file mode 100644 index 0000000000..7a6aaac289 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java @@ -0,0 +1,347 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task.processdure; + +import org.apache.dolphinscheduler.common.enums.DataType; +import org.apache.dolphinscheduler.common.enums.Direct; +import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; +import org.apache.dolphinscheduler.common.job.db.BaseDataSource; +import org.apache.dolphinscheduler.common.job.db.DataSourceFactory; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import com.alibaba.fastjson.JSONObject; +import com.cronutils.utils.StringUtils; +import org.slf4j.Logger; + +import java.sql.*; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import static org.apache.dolphinscheduler.common.enums.DataType.*; + +/** + * procedure task + */ +public class ProcedureTask extends AbstractTask { + + /** + * procedure parameters + */ + private ProcedureParameters procedureParameters; + + 
/** + * process database access + */ + private ProcessDao processDao; + + /** + * base datasource + */ + private BaseDataSource baseDataSource; + + public ProcedureTask(TaskProps taskProps, Logger logger) { + super(taskProps, logger); + + logger.info("procedure task params {}", taskProps.getTaskParams()); + + this.procedureParameters = JSONObject.parseObject(taskProps.getTaskParams(), ProcedureParameters.class); + + // check parameters + if (!procedureParameters.checkParameters()) { + throw new RuntimeException("procedure task params is not valid"); + } + + this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); + } + + @Override + public void handle() throws Exception { + // set the name of the current thread + String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskProps.getTaskAppId()); + Thread.currentThread().setName(threadLoggerInfoName); + + logger.info("processdure type : {}, datasource : {}, method : {} , localParams : {}", + procedureParameters.getType(), + procedureParameters.getDatasource(), + procedureParameters.getMethod(), + procedureParameters.getLocalParams()); + + // determine whether there is a data source + if (procedureParameters.getDatasource() == 0){ + logger.error("datasource id not exists"); + exitStatusCode = -1; + return; + } + + DataSource dataSource = processDao.findDataSourceById(procedureParameters.getDatasource()); + logger.info("datasource name : {} , type : {} , desc : {} , user_id : {} , parameter : {}", + dataSource.getName(), + dataSource.getType(), + dataSource.getNote(), + dataSource.getUserId(), + dataSource.getConnectionParams()); + + if (dataSource == null){ + logger.error("datasource not exists"); + exitStatusCode = -1; + return; + } + Connection connection = null; + CallableStatement stmt = null; + try { + // load class + DataSourceFactory.loadClass(dataSource.getType()); + // get datasource + baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), + dataSource.getConnectionParams()); + + 
// get jdbc connection + connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), + baseDataSource.getUser(), + baseDataSource.getPassword()); + + + + // combining local and global parameters + Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), + taskProps.getDefinedParams(), + procedureParameters.getLocalParametersMap(), + taskProps.getCmdTypeIfComplement(), + taskProps.getScheduleTime()); + + + Collection userDefParamsList = null; + + if (procedureParameters.getLocalParametersMap() != null){ + userDefParamsList = procedureParameters.getLocalParametersMap().values(); + } + + String method = ""; + // no parameters + if (CollectionUtils.isEmpty(userDefParamsList)){ + method = "{call " + procedureParameters.getMethod() + "}"; + }else { // exists parameters + int size = userDefParamsList.size(); + StringBuilder parameter = new StringBuilder(); + parameter.append("("); + for (int i = 0 ;i < size - 1; i++){ + parameter.append("?,"); + } + parameter.append("?)"); + method = "{call " + procedureParameters.getMethod() + parameter.toString()+ "}"; + } + + logger.info("call method : {}",method); + // call method + stmt = connection.prepareCall(method); + if(taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.FAILED || taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.WARNFAILED){ + stmt.setQueryTimeout(taskProps.getTaskTimeout()); + } + Map outParameterMap = new HashMap<>(); + if (userDefParamsList != null && userDefParamsList.size() > 0){ + int index = 1; + for (Property property : userDefParamsList){ + logger.info("localParams : prop : {} , dirct : {} , type : {} , value : {}" + ,property.getProp(), + property.getDirect(), + property.getType(), + property.getValue()); + // set parameters + if (property.getDirect().equals(Direct.IN)){ + ParameterUtils.setInParameter(index,stmt,property.getType(),paramsMap.get(property.getProp()).getValue()); + }else if (property.getDirect().equals(Direct.OUT)){ + 
setOutParameter(index,stmt,property.getType(),paramsMap.get(property.getProp()).getValue()); + property.setValue(paramsMap.get(property.getProp()).getValue()); + outParameterMap.put(index,property); + } + index++; + } + } + + stmt.executeUpdate(); + + /** + * print the output parameters to the log + */ + Iterator> iter = outParameterMap.entrySet().iterator(); + while (iter.hasNext()){ + Map.Entry en = iter.next(); + + int index = en.getKey(); + Property property = en.getValue(); + String prop = property.getProp(); + DataType dataType = property.getType(); + // get output parameter + getOutputParameter(stmt, index, prop, dataType); + } + + exitStatusCode = 0; + }catch (Exception e){ + logger.error(e.getMessage(),e); + exitStatusCode = -1; + throw new RuntimeException(String.format("process interrupted. exit status code is %d",exitStatusCode)); + } + finally { + if (stmt != null) { + try { + stmt.close(); + } catch (SQLException e) { + exitStatusCode = -1; + logger.error(e.getMessage(),e); + } + } + if (connection != null) { + try { + connection.close(); + } catch (SQLException e) { + exitStatusCode = -1; + logger.error(e.getMessage(), e); + } + } + } + } + + /** + * get output parameter + * @param stmt + * @param index + * @param prop + * @param dataType + * @throws SQLException + */ + private void getOutputParameter(CallableStatement stmt, int index, String prop, DataType dataType) throws SQLException { + switch (dataType){ + case VARCHAR: + logger.info("out prameter key : {} , value : {}",prop,stmt.getString(index)); + break; + case INTEGER: + logger.info("out prameter key : {} , value : {}", prop, stmt.getInt(index)); + break; + case LONG: + logger.info("out prameter key : {} , value : {}",prop,stmt.getLong(index)); + break; + case FLOAT: + logger.info("out prameter key : {} , value : {}",prop,stmt.getFloat(index)); + break; + case DOUBLE: + logger.info("out prameter key : {} , value : {}",prop,stmt.getDouble(index)); + break; + case DATE: + logger.info("out 
prameter key : {} , value : {}",prop,stmt.getDate(index)); + break; + case TIME: + logger.info("out prameter key : {} , value : {}",prop,stmt.getTime(index)); + break; + case TIMESTAMP: + logger.info("out prameter key : {} , value : {}",prop,stmt.getTimestamp(index)); + break; + case BOOLEAN: + logger.info("out prameter key : {} , value : {}",prop, stmt.getBoolean(index)); + break; + default: + break; + } + } + + @Override + public AbstractParameters getParameters() { + return procedureParameters; + } + + /** + * set out parameter + * @param index + * @param stmt + * @param dataType + * @param value + * @throws Exception + */ + private void setOutParameter(int index,CallableStatement stmt,DataType dataType,String value)throws Exception{ + if (dataType.equals(VARCHAR)){ + if (StringUtils.isEmpty(value)){ + stmt.registerOutParameter(index, Types.VARCHAR); + }else { + stmt.registerOutParameter(index, Types.VARCHAR, value); + } + + }else if (dataType.equals(INTEGER)){ + if (StringUtils.isEmpty(value)){ + stmt.registerOutParameter(index, Types.INTEGER); + }else { + stmt.registerOutParameter(index, Types.INTEGER, value); + } + + }else if (dataType.equals(LONG)){ + if (StringUtils.isEmpty(value)){ + stmt.registerOutParameter(index,Types.INTEGER); + }else { + stmt.registerOutParameter(index,Types.INTEGER ,value); + } + }else if (dataType.equals(FLOAT)){ + if (StringUtils.isEmpty(value)){ + stmt.registerOutParameter(index, Types.FLOAT); + }else { + stmt.registerOutParameter(index, Types.FLOAT,value); + } + }else if (dataType.equals(DOUBLE)){ + if (StringUtils.isEmpty(value)){ + stmt.registerOutParameter(index, Types.DOUBLE); + }else { + stmt.registerOutParameter(index, Types.DOUBLE , value); + } + + }else if (dataType.equals(DATE)){ + if (StringUtils.isEmpty(value)){ + stmt.registerOutParameter(index, Types.DATE); + }else { + stmt.registerOutParameter(index, Types.DATE , value); + } + + }else if (dataType.equals(TIME)){ + if (StringUtils.isEmpty(value)){ + 
stmt.registerOutParameter(index, Types.TIME); + }else { + stmt.registerOutParameter(index, Types.TIME , value); + } + + }else if (dataType.equals(TIMESTAMP)){ + if (StringUtils.isEmpty(value)){ + stmt.registerOutParameter(index, Types.TIMESTAMP); + }else { + stmt.registerOutParameter(index, Types.TIMESTAMP , value); + } + + }else if (dataType.equals(BOOLEAN)){ + if (StringUtils.isEmpty(value)){ + stmt.registerOutParameter(index, Types.BOOLEAN); + }else { + stmt.registerOutParameter(index, Types.BOOLEAN , value); + } + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java new file mode 100644 index 0000000000..8a9903b09f --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.python; + + +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.python.PythonParameters; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.PythonCommandExecutor; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.slf4j.Logger; + + +import java.util.Map; + +/** + * python task + */ +public class PythonTask extends AbstractTask { + + /** + * python parameters + */ + private PythonParameters pythonParameters; + + /** + * task dir + */ + private String taskDir; + + /** + * python command executor + */ + private PythonCommandExecutor pythonCommandExecutor; + + /** + * process database access + */ + private ProcessDao processDao; + + + public PythonTask(TaskProps taskProps, Logger logger) { + super(taskProps, logger); + + this.taskDir = taskProps.getTaskDir(); + + this.pythonCommandExecutor = new PythonCommandExecutor(this::logHandle, + taskProps.getTaskDir(), + taskProps.getTaskAppId(), + taskProps.getTaskInstId(), + taskProps.getTenantCode(), + taskProps.getEnvFile(), + taskProps.getTaskStartTime(), + taskProps.getTaskTimeout(), + logger); + this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); + } + + @Override + public void init() { + logger.info("python task params {}", taskProps.getTaskParams()); + + pythonParameters = JSONUtils.parseObject(taskProps.getTaskParams(), PythonParameters.class); + + if (!pythonParameters.checkParameters()) { + throw new RuntimeException("python task params is 
not valid"); + } + } + + @Override + public void handle() throws Exception { + try { + // construct process + exitStatusCode = pythonCommandExecutor.run(buildCommand(), processDao); + } catch (Exception e) { + logger.error("python task failure", e); + exitStatusCode = -1; + } + } + + @Override + public void cancelApplication(boolean cancelApplication) throws Exception { + // cancel process + pythonCommandExecutor.cancelApplication(); + } + + /** + * build command + * @return + * @throws Exception + */ + private String buildCommand() throws Exception { + String rawPythonScript = pythonParameters.getRawScript().replaceAll("\\r\\n", "\n"); + + /** + * combining local and global parameters + */ + Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), + taskProps.getDefinedParams(), + pythonParameters.getLocalParametersMap(), + taskProps.getCmdTypeIfComplement(), + taskProps.getScheduleTime()); + if (paramsMap != null){ + rawPythonScript = ParameterUtils.convertParameterPlaceholders(rawPythonScript, ParamUtils.convert(paramsMap)); + } + + logger.info("raw python script : {}", pythonParameters.getRawScript()); + logger.info("task dir : {}", taskDir); + + return rawPythonScript; + } + + @Override + public AbstractParameters getParameters() { + return pythonParameters; + } + + + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java new file mode 100644 index 0000000000..a7264d5977 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java @@ -0,0 +1,160 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task.shell; + + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.shell.ShellParameters; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.slf4j.Logger; + +import java.io.File; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.Map; +import java.util.Set; + +/** + * shell task + */ +public class ShellTask extends AbstractTask { + + private ShellParameters shellParameters; + + /** + * task dir + */ + private String taskDir; + + private ShellCommandExecutor shellCommandExecutor; + + /** + 
* process database access + */ + private ProcessDao processDao; + + + public ShellTask(TaskProps taskProps, Logger logger) { + super(taskProps, logger); + + this.taskDir = taskProps.getTaskDir(); + + this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskProps.getTaskDir(), + taskProps.getTaskAppId(), + taskProps.getTaskInstId(), + taskProps.getTenantCode(), + taskProps.getEnvFile(), + taskProps.getTaskStartTime(), + taskProps.getTaskTimeout(), + logger); + this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); + } + + @Override + public void init() { + logger.info("shell task params {}", taskProps.getTaskParams()); + + shellParameters = JSONUtils.parseObject(taskProps.getTaskParams(), ShellParameters.class); + + if (!shellParameters.checkParameters()) { + throw new RuntimeException("shell task params is not valid"); + } + } + + @Override + public void handle() throws Exception { + try { + // construct process + exitStatusCode = shellCommandExecutor.run(buildCommand(), processDao); + } catch (Exception e) { + logger.error("shell task failure", e); + exitStatusCode = -1; + } + } + + @Override + public void cancelApplication(boolean cancelApplication) throws Exception { + // cancel process + shellCommandExecutor.cancelApplication(); + } + + /** + * create command + * @return + * @throws Exception + */ + private String buildCommand() throws Exception { + // generate scripts + String fileName = String.format("%s/%s_node.sh", taskDir, taskProps.getTaskAppId()); + Path path = new File(fileName).toPath(); + + if (Files.exists(path)) { + return fileName; + } + + String script = shellParameters.getRawScript().replaceAll("\\r\\n", "\n"); + + + /** + * combining local and global parameters + */ + Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), + taskProps.getDefinedParams(), + shellParameters.getLocalParametersMap(), + taskProps.getCmdTypeIfComplement(), + taskProps.getScheduleTime()); + if (paramsMap != null){ + script = 
ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); + } + + + shellParameters.setRawScript(script); + + logger.info("raw script : {}", shellParameters.getRawScript()); + logger.info("task dir : {}", taskDir); + + Set perms = PosixFilePermissions.fromString(Constants.RWXR_XR_X); + FileAttribute> attr = PosixFilePermissions.asFileAttribute(perms); + + Files.createFile(path, attr); + + Files.write(path, shellParameters.getRawScript().getBytes(), StandardOpenOption.APPEND); + + return fileName; + } + + @Override + public AbstractParameters getParameters() { + return shellParameters; + } + + + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java new file mode 100644 index 0000000000..2ee42160fc --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.spark; + +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.spark.SparkParameters; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import org.apache.dolphinscheduler.server.utils.SparkArgsUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * spark task + */ +public class SparkTask extends AbstractYarnTask { + + /** + * spark command + */ + private static final String SPARK_COMMAND = "spark-submit"; + + /** + * spark parameters + */ + private SparkParameters sparkParameters; + + public SparkTask(TaskProps props, Logger logger) { + super(props, logger); + } + + @Override + public void init() { + + logger.info("spark task params {}", taskProps.getTaskParams()); + + sparkParameters = JSONUtils.parseObject(taskProps.getTaskParams(), SparkParameters.class); + + if (!sparkParameters.checkParameters()) { + throw new RuntimeException("spark task params is not valid"); + } + sparkParameters.setQueue(taskProps.getQueue()); + + if (StringUtils.isNotEmpty(sparkParameters.getMainArgs())) { + String args = sparkParameters.getMainArgs(); + + /** + * combining local and global parameters + */ + Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), + taskProps.getDefinedParams(), + sparkParameters.getLocalParametersMap(), + taskProps.getCmdTypeIfComplement(), + taskProps.getScheduleTime()); + if (paramsMap != null ){ + args = ParameterUtils.convertParameterPlaceholders(args, 
ParamUtils.convert(paramsMap)); + } + sparkParameters.setMainArgs(args); + } + } + + /** + * create command + * @return + */ + @Override + protected String buildCommand() { + List args = new ArrayList<>(); + + args.add(SPARK_COMMAND); + + // other parameters + args.addAll(SparkArgsUtils.buildArgs(sparkParameters)); + + String command = ParameterUtils + .convertParameterPlaceholders(String.join(" ", args), taskProps.getDefinedParams()); + + logger.info("spark task command : {}", command); + + return command; + } + + @Override + public AbstractParameters getParameters() { + return sparkParameters; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java new file mode 100644 index 0000000000..2e82276040 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java @@ -0,0 +1,447 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.sql; + +import org.apache.dolphinscheduler.alert.utils.MailUtils; +import org.apache.dolphinscheduler.common.enums.ShowType; +import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; +import org.apache.dolphinscheduler.common.enums.UdfType; +import org.apache.dolphinscheduler.common.job.db.BaseDataSource; +import org.apache.dolphinscheduler.common.job.db.DataSourceFactory; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.sql.SqlBinds; +import org.apache.dolphinscheduler.common.task.sql.SqlParameters; +import org.apache.dolphinscheduler.common.task.sql.SqlType; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.CommonUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.UdfFunc; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import org.apache.dolphinscheduler.server.utils.UDFUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.serializer.SerializerFeature; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang3.EnumUtils; +import org.slf4j.Logger; + +import java.sql.*; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import 
java.util.stream.Collectors; + +import static org.apache.dolphinscheduler.common.Constants.*; +import static org.apache.dolphinscheduler.common.enums.DbType.*; +/** + * sql task + */ +public class SqlTask extends AbstractTask { + + /** + * sql parameters + */ + private SqlParameters sqlParameters; + + /** + * process database access + */ + private ProcessDao processDao; + + /** + * alert dao + */ + private AlertDao alertDao; + + /** + * datasource + */ + private DataSource dataSource; + + /** + * base datasource + */ + private BaseDataSource baseDataSource; + + + public SqlTask(TaskProps taskProps, Logger logger) { + super(taskProps, logger); + + logger.info("sql task params {}", taskProps.getTaskParams()); + this.sqlParameters = JSONObject.parseObject(taskProps.getTaskParams(), SqlParameters.class); + + if (!sqlParameters.checkParameters()) { + throw new RuntimeException("sql task params is not valid"); + } + this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); + this.alertDao = DaoFactory.getDaoInstance(AlertDao.class); + } + + @Override + public void handle() throws Exception { + // set the name of the current thread + String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskProps.getTaskAppId()); + Thread.currentThread().setName(threadLoggerInfoName); + logger.info(sqlParameters.toString()); + logger.info("sql type : {}, datasource : {}, sql : {} , localParams : {},udfs : {},showType : {},connParams : {}", + sqlParameters.getType(), + sqlParameters.getDatasource(), + sqlParameters.getSql(), + sqlParameters.getLocalParams(), + sqlParameters.getUdfs(), + sqlParameters.getShowType(), + sqlParameters.getConnParams()); + + // not set data source + if (sqlParameters.getDatasource() == 0){ + logger.error("datasource id not exists"); + exitStatusCode = -1; + return; + } + + dataSource= processDao.findDataSourceById(sqlParameters.getDatasource()); + logger.info("datasource name : {} , type : {} , desc : {} , user_id : {} , parameter : {}", + 
dataSource.getName(), + dataSource.getType(), + dataSource.getNote(), + dataSource.getUserId(), + dataSource.getConnectionParams()); + + if (dataSource == null){ + logger.error("datasource not exists"); + exitStatusCode = -1; + return; + } + + Connection con = null; + List createFuncs = null; + try { + // load class + DataSourceFactory.loadClass(dataSource.getType()); + // get datasource + baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), + dataSource.getConnectionParams()); + + // ready to execute SQL and parameter entity Map + SqlBinds mainSqlBinds = getSqlAndSqlParamsMap(sqlParameters.getSql()); + List preStatementSqlBinds = Optional.ofNullable(sqlParameters.getPreStatements()) + .orElse(new ArrayList<>()) + .stream() + .map(this::getSqlAndSqlParamsMap) + .collect(Collectors.toList()); + List postStatementSqlBinds = Optional.ofNullable(sqlParameters.getPostStatements()) + .orElse(new ArrayList<>()) + .stream() + .map(this::getSqlAndSqlParamsMap) + .collect(Collectors.toList()); + + // determine if it is UDF + boolean udfTypeFlag = EnumUtils.isValidEnum(UdfType.class, sqlParameters.getType()) + && StringUtils.isNotEmpty(sqlParameters.getUdfs()); + if(udfTypeFlag){ + List udfFuncList = processDao.queryUdfFunListByids(sqlParameters.getUdfs()); + createFuncs = UDFUtils.createFuncs(udfFuncList, taskProps.getTenantCode(), logger); + } + + // execute sql task + con = executeFuncAndSql(mainSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs); + } finally { + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + throw e; + } + } + } + } + + /** + * ready to execute SQL and parameter entity Map + * @return + */ + private SqlBinds getSqlAndSqlParamsMap(String sql) { + Map sqlParamsMap = new HashMap<>(); + StringBuilder sqlBuilder = new StringBuilder(); + + // find process instance by task id + + + Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), + taskProps.getDefinedParams(), + 
sqlParameters.getLocalParametersMap(), + taskProps.getCmdTypeIfComplement(), + taskProps.getScheduleTime()); + + // spell SQL according to the final user-defined variable + if(paramsMap == null){ + sqlBuilder.append(sql); + return new SqlBinds(sqlBuilder.toString(), sqlParamsMap); + } + + if (StringUtils.isNotEmpty(sqlParameters.getTitle())){ + String title = ParameterUtils.convertParameterPlaceholders(sqlParameters.getTitle(), + ParamUtils.convert(paramsMap)); + logger.info("SQL tile : {}",title); + sqlParameters.setTitle(title); + } + + // special characters need to be escaped, ${} needs to be escaped + String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*"; + setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap); + + // replace the ${} of the SQL statement with the Placeholder + String formatSql = sql.replaceAll(rgex,"?"); + sqlBuilder.append(formatSql); + + // print repalce sql + printReplacedSql(sql,formatSql,rgex,sqlParamsMap); + return new SqlBinds(sqlBuilder.toString(), sqlParamsMap); + } + + @Override + public AbstractParameters getParameters() { + return this.sqlParameters; + } + + /** + * execute sql + * @param mainSqlBinds + * @param preStatementsBinds + * @param postStatementsBinds + * @param createFuncs + * @return + */ + public Connection executeFuncAndSql(SqlBinds mainSqlBinds, + List preStatementsBinds, + List postStatementsBinds, + List createFuncs){ + Connection connection = null; + try { + // if upload resource is HDFS and kerberos startup + CommonUtils.loadKerberosConf(); + + // if hive , load connection params if exists + if (HIVE == dataSource.getType()) { + Properties paramProp = new Properties(); + paramProp.setProperty(USER, baseDataSource.getUser()); + paramProp.setProperty(PASSWORD, baseDataSource.getPassword()); + Map connParamMap = CollectionUtils.stringToMap(sqlParameters.getConnParams(), + SEMICOLON, + HIVE_CONF); + if(connParamMap != null){ + paramProp.putAll(connParamMap); + } + + connection = 
DriverManager.getConnection(baseDataSource.getJdbcUrl(), + paramProp); + }else{ + connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), + baseDataSource.getUser(), + baseDataSource.getPassword()); + } + + // create temp function + if (CollectionUtils.isNotEmpty(createFuncs)) { + try (Statement funcStmt = connection.createStatement()) { + for (String createFunc : createFuncs) { + logger.info("hive create function sql: {}", createFunc); + funcStmt.execute(createFunc); + } + } + } + + for (SqlBinds sqlBind: preStatementsBinds) { + try (PreparedStatement stmt = prepareStatementAndBind(connection, sqlBind)) { + int result = stmt.executeUpdate(); + logger.info("pre statement execute result: {}, for sql: {}",result,sqlBind.getSql()); + } + } + + try (PreparedStatement stmt = prepareStatementAndBind(connection, mainSqlBinds)) { + // decide whether to executeQuery or executeUpdate based on sqlType + if (sqlParameters.getSqlType() == SqlType.QUERY.ordinal()) { + // query statements need to be convert to JsonArray and inserted into Alert to send + JSONArray resultJSONArray = new JSONArray(); + ResultSet resultSet = stmt.executeQuery(); + ResultSetMetaData md = resultSet.getMetaData(); + int num = md.getColumnCount(); + + while (resultSet.next()) { + JSONObject mapOfColValues = new JSONObject(true); + for (int i = 1; i <= num; i++) { + mapOfColValues.put(md.getColumnName(i), resultSet.getObject(i)); + } + resultJSONArray.add(mapOfColValues); + } + + logger.debug("execute sql : {}", JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); + + // if there is a result set + if (resultJSONArray.size() > 0) { + if (StringUtils.isNotEmpty(sqlParameters.getTitle())) { + sendAttachment(sqlParameters.getTitle(), + JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); + }else{ + sendAttachment(taskProps.getNodeName() + " query resultsets ", + JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); + 
} + } + + exitStatusCode = 0; + + } else if (sqlParameters.getSqlType() == SqlType.NON_QUERY.ordinal()) { + // non query statement + stmt.executeUpdate(); + exitStatusCode = 0; + } + } + + for (SqlBinds sqlBind: postStatementsBinds) { + try (PreparedStatement stmt = prepareStatementAndBind(connection, sqlBind)) { + int result = stmt.executeUpdate(); + logger.info("post statement execute result: {},for sql: {}",result,sqlBind.getSql()); + } + } + } catch (Exception e) { + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage()); + } + return connection; + } + + /** + * preparedStatement bind + * @param connection + * @param sqlBinds + * @return + * @throws Exception + */ + private PreparedStatement prepareStatementAndBind(Connection connection, SqlBinds sqlBinds) throws Exception { + PreparedStatement stmt = connection.prepareStatement(sqlBinds.getSql()); + // is the timeout set + boolean timeoutFlag = taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.FAILED || + taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.WARNFAILED; + if(timeoutFlag){ + stmt.setQueryTimeout(taskProps.getTaskTimeout()); + } + Map params = sqlBinds.getParamsMap(); + if(params != null){ + for(Integer key : params.keySet()){ + Property prop = params.get(key); + ParameterUtils.setInParameter(key,stmt,prop.getType(),prop.getValue()); + } + } + logger.info("prepare statement replace sql : {} ",stmt.toString()); + return stmt; + } + + /** + * send mail as an attachment + * @param title + * @param content + */ + public void sendAttachment(String title,String content){ + + // process instance + ProcessInstance instance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); + + List users = alertDao.queryUserByAlertGroupId(instance.getWarningGroupId()); + + // receiving group list + List receviersList = new ArrayList(); + for(User user:users){ + receviersList.add(user.getEmail().trim()); + } + // custom receiver + String receivers = 
sqlParameters.getReceivers(); + if (StringUtils.isNotEmpty(receivers)){ + String[] splits = receivers.split(COMMA); + for (String receiver : splits){ + receviersList.add(receiver.trim()); + } + } + + // copy list + List receviersCcList = new ArrayList(); + // Custom Copier + String receiversCc = sqlParameters.getReceiversCc(); + if (StringUtils.isNotEmpty(receiversCc)){ + String[] splits = receiversCc.split(COMMA); + for (String receiverCc : splits){ + receviersCcList.add(receiverCc.trim()); + } + } + + String showTypeName = sqlParameters.getShowType().replace(COMMA,"").trim(); + if(EnumUtils.isValidEnum(ShowType.class,showTypeName)){ + Map mailResult = MailUtils.sendMails(receviersList, + receviersCcList, title, content, ShowType.valueOf(showTypeName)); + if(!(Boolean) mailResult.get(STATUS)){ + throw new RuntimeException("send mail failed!"); + } + }else{ + logger.error("showType: {} is not valid " ,showTypeName); + throw new RuntimeException(String.format("showType: %s is not valid ",showTypeName)); + } + } + + /** + * regular expressions match the contents between two specified strings + * @param content + * @return + */ + public void setSqlParamsMap(String content, String rgex, Map sqlParamsMap, Map paramsPropsMap){ + Pattern pattern = Pattern.compile(rgex); + Matcher m = pattern.matcher(content); + int index = 1; + while (m.find()) { + + String paramName = m.group(1); + Property prop = paramsPropsMap.get(paramName); + + sqlParamsMap.put(index,prop); + index ++; + } + } + + /** + * print replace sql + * @param content + * @param formatSql + * @param rgex + * @param sqlParamsMap + */ + public void printReplacedSql(String content, String formatSql,String rgex, Map sqlParamsMap){ + //parameter print style + logger.info("after replace sql , preparing : {}" , formatSql); + StringBuilder logPrint = new StringBuilder("replaced sql , parameters:"); + for(int i=1;i<=sqlParamsMap.size();i++){ + 
logPrint.append(sqlParamsMap.get(i).getValue()+"("+sqlParamsMap.get(i).getType()+")"); + } + logger.info(logPrint.toString()); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java new file mode 100644 index 0000000000..b0c401bcac --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java @@ -0,0 +1,411 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.zk; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.ZKNodeType; +import org.apache.dolphinscheduler.common.model.MasterServer; +import org.apache.dolphinscheduler.common.zk.AbstractZKClient; +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.utils.ProcessUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.recipes.cache.PathChildrenCache; +import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; +import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; +import org.apache.curator.framework.recipes.locks.InterProcessMutex; +import org.apache.curator.utils.ThreadUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Date; +import java.util.List; +import java.util.concurrent.ThreadFactory; + + +/** + * zookeeper master client + * + * single instance + */ +public class ZKMasterClient extends AbstractZKClient { + + private static final Logger logger = LoggerFactory.getLogger(ZKMasterClient.class); + + private static final ThreadFactory defaultThreadFactory = ThreadUtils.newGenericThreadFactory("Master-Main-Thread"); + + /** + * master znode + */ + private String masterZNode = null; + + /** + * alert database access + */ + private AlertDao alertDao = null; + /** + * flow database access + */ + private ProcessDao processDao; + + /** + * zkMasterClient + */ + private static ZKMasterClient zkMasterClient = null; + + + private ZKMasterClient(ProcessDao processDao){ + this.processDao = 
processDao; + init(); + } + + private ZKMasterClient(){} + + /** + * get zkMasterClient + * @param processDao + * @return + */ + public static synchronized ZKMasterClient getZKMasterClient(ProcessDao processDao){ + if(zkMasterClient == null){ + zkMasterClient = new ZKMasterClient(processDao); + } + zkMasterClient.processDao = processDao; + + return zkMasterClient; + } + + /** + * init + */ + public void init(){ + // init dao + this.initDao(); + + InterProcessMutex mutex = null; + try { + // create distributed lock with the root node path of the lock space as /escheduler/lock/failover/master + String znodeLock = getMasterStartUpLockPath(); + mutex = new InterProcessMutex(zkClient, znodeLock); + mutex.acquire(); + + // init system znode + this.initSystemZNode(); + + // monitor master + this.listenerMaster(); + + // monitor worker + this.listenerWorker(); + + // register master + this.registerMaster(); + + // check if fault tolerance is required,failure and tolerance + if (getActiveMasterNum() == 1) { + failoverWorker(null, true); + failoverMaster(null); + } + + }catch (Exception e){ + logger.error("master start up exception : " + e.getMessage(),e); + }finally { + releaseMutex(mutex); + } + } + + + + + /** + * init dao + */ + public void initDao(){ + this.alertDao = DaoFactory.getDaoInstance(AlertDao.class); + this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); + } + /** + * get alert dao + * @return + */ + public AlertDao getAlertDao() { + return alertDao; + } + + + + + /** + * register master znode + */ + public void registerMaster(){ + try { + String serverPath = registerServer(ZKNodeType.MASTER); + if(StringUtils.isEmpty(serverPath)){ + System.exit(-1); + } + masterZNode = serverPath; + } catch (Exception e) { + logger.error("register master failure : " + e.getMessage(),e); + System.exit(-1); + } + } + + + + /** + * monitor master + */ + public void listenerMaster(){ + PathChildrenCache masterPc = new PathChildrenCache(zkClient, + 
getZNodeParentPath(ZKNodeType.MASTER), true ,defaultThreadFactory); + + try { + masterPc.start(); + masterPc.getListenable().addListener(new PathChildrenCacheListener() { + @Override + public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception { + switch (event.getType()) { + case CHILD_ADDED: + logger.info("master node added : {}",event.getData().getPath()); + break; + case CHILD_REMOVED: + String path = event.getData().getPath(); + String serverHost = getHostByEventDataPath(path); + if(checkServerSelfDead(serverHost, ZKNodeType.MASTER)){ + return; + } + removeZKNodePath(path, ZKNodeType.MASTER, true); + break; + case CHILD_UPDATED: + break; + default: + break; + } + } + }); + }catch (Exception e){ + logger.error("monitor master failed : " + e.getMessage(),e); + } +} + + private void removeZKNodePath(String path, ZKNodeType zkNodeType, boolean failover) { + logger.info("{} node deleted : {}", zkNodeType.toString(), path); + InterProcessMutex mutex = null; + try { + String failoverPath = getFailoverLockPath(zkNodeType); + // create a distributed lock + mutex = new InterProcessMutex(getZkClient(), failoverPath); + mutex.acquire(); + + String serverHost = getHostByEventDataPath(path); + // handle dead server + handleDeadServer(path, zkNodeType, Constants.ADD_ZK_OP); + //alert server down. 
+ alertServerDown(serverHost, zkNodeType); + //failover server + if(failover){ + failoverServerWhenDown(serverHost, zkNodeType); + } + }catch (Exception e){ + logger.error("{} server failover failed.", zkNodeType.toString()); + logger.error("failover exception : " + e.getMessage(),e); + } + finally { + releaseMutex(mutex); + } + } + + private void failoverServerWhenDown(String serverHost, ZKNodeType zkNodeType) throws Exception { + if(StringUtils.isEmpty(serverHost)){ + return ; + } + switch (zkNodeType){ + case MASTER: + failoverMaster(serverHost); + break; + case WORKER: + failoverWorker(serverHost, true); + default: + break; + } + } + + private String getFailoverLockPath(ZKNodeType zkNodeType){ + + switch (zkNodeType){ + case MASTER: + return getMasterFailoverLockPath(); + case WORKER: + return getWorkerFailoverLockPath(); + default: + return ""; + } + } + + private void alertServerDown(String serverHost, ZKNodeType zkNodeType) { + + String serverType = zkNodeType.toString(); + for (int i = 0; i < Constants.ESCHEDULER_WARN_TIMES_FAILOVER; i++) { + alertDao.sendServerStopedAlert(1, serverHost, serverType); + } + } + + /** + * monitor worker + */ + public void listenerWorker(){ + + PathChildrenCache workerPc = new PathChildrenCache(zkClient, + getZNodeParentPath(ZKNodeType.WORKER),true ,defaultThreadFactory); + try { + workerPc.start(); + workerPc.getListenable().addListener(new PathChildrenCacheListener() { + @Override + public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) { + switch (event.getType()) { + case CHILD_ADDED: + logger.info("node added : {}" ,event.getData().getPath()); + break; + case CHILD_REMOVED: + String path = event.getData().getPath(); + logger.info("node deleted : {}",event.getData().getPath()); + removeZKNodePath(path, ZKNodeType.WORKER, true); + break; + default: + break; + } + } + }); + }catch (Exception e){ + logger.error("listener worker failed : " + e.getMessage(),e); + } + } + + + /** + * get master znode + * 
@return + */ + public String getMasterZNode() { + return masterZNode; + } + + /** + * task needs failover if task start before worker starts + * + * @param taskInstance + * @return + */ + private boolean checkTaskInstanceNeedFailover(TaskInstance taskInstance) throws Exception { + + boolean taskNeedFailover = true; + + //now no host will execute this task instance,so no need to failover the task + if(taskInstance.getHost() == null){ + return false; + } + + // if the worker node exists in zookeeper, we must check the task starts after the worker + if(checkZKNodeExists(taskInstance.getHost(), ZKNodeType.WORKER)){ + //if task start after worker starts, there is no need to failover the task. + if(checkTaskAfterWorkerStart(taskInstance)){ + taskNeedFailover = false; + } + } + return taskNeedFailover; + } + + /** + * check task start after the worker server starts. + * @param taskInstance + * @return + */ + private boolean checkTaskAfterWorkerStart(TaskInstance taskInstance) { + if(StringUtils.isEmpty(taskInstance.getHost())){ + return false; + } + Date workerServerStartDate = null; + List workerServers= getServersList(ZKNodeType.WORKER); + for(MasterServer server : workerServers){ + if(server.getHost().equals(taskInstance.getHost())){ + workerServerStartDate = server.getCreateTime(); + break; + } + } + + if(workerServerStartDate != null){ + return taskInstance.getStartTime().after(workerServerStartDate); + }else{ + return false; + } + } + + /** + * failover worker tasks + * 1. kill yarn job if there are yarn jobs in tasks. + * 2. change task state from running to need failover. + * 3. 
failover all tasks when workerHost is null + * @param workerHost + */ + private void failoverWorker(String workerHost, boolean needCheckWorkerAlive) throws Exception { + logger.info("start worker[{}] failover ...", workerHost); + + List needFailoverTaskInstanceList = processDao.queryNeedFailoverTaskInstances(workerHost); + for(TaskInstance taskInstance : needFailoverTaskInstanceList){ + if(needCheckWorkerAlive){ + if(!checkTaskInstanceNeedFailover(taskInstance)){ + continue; + } + } + + ProcessInstance instance = processDao.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); + if(instance!=null){ + taskInstance.setProcessInstance(instance); + } + // only kill yarn job if exists , the local thread has exited + ProcessUtils.killYarnJob(taskInstance); + + taskInstance.setState(ExecutionStatus.NEED_FAULT_TOLERANCE); + processDao.saveTaskInstance(taskInstance); + } + logger.info("end worker[{}] failover ...", workerHost); + } + + /** + * failover master tasks + * @param masterHost + */ + private void failoverMaster(String masterHost) { + logger.info("start master failover ..."); + + List needFailoverProcessInstanceList = processDao.queryNeedFailoverProcessInstances(masterHost); + + //updateProcessInstance host is null and insert into command + for(ProcessInstance processInstance : needFailoverProcessInstanceList){ + processDao.processNeedFailoverProcessInstances(processInstance); + } + + logger.info("master failover end"); + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java new file mode 100644 index 0000000000..1cb31a4c88 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java @@ -0,0 +1,166 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.zk; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ZKNodeType; +import org.apache.dolphinscheduler.common.zk.AbstractZKClient; +import org.apache.commons.lang.StringUtils; +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.recipes.cache.PathChildrenCache; +import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; +import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; +import org.apache.curator.utils.ThreadUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Date; +import java.util.concurrent.ThreadFactory; + + +/** + * zookeeper worker client + * single instance + */ +public class ZKWorkerClient extends AbstractZKClient { + + private static final Logger logger = LoggerFactory.getLogger(ZKWorkerClient.class); + + + private static final ThreadFactory defaultThreadFactory = ThreadUtils.newGenericThreadFactory("Worker-Main-Thread"); + + + /** + * worker znode + */ + private String workerZNode = null; + + /** + * create time + */ + private Date createTime = null; + + /** + * zkWorkerClient + */ + private static ZKWorkerClient zkWorkerClient = null; + + private 
ZKWorkerClient(){ + init(); + } + + /** + * init + */ + private void init(){ + + // init system znode + this.initSystemZNode(); + + // monitor worker + this.listenerWorker(); + + // register worker + this.registWorker(); + } + + + /** + * get zkWorkerClient + * + * @return + */ + public static synchronized ZKWorkerClient getZKWorkerClient(){ + if(zkWorkerClient == null){ + zkWorkerClient = new ZKWorkerClient(); + } + return zkWorkerClient; + } + + + /** + * register worker + */ + private void registWorker(){ + try { + String serverPath = registerServer(ZKNodeType.WORKER); + if(StringUtils.isEmpty(serverPath)){ + System.exit(-1); + } + workerZNode = serverPath; + } catch (Exception e) { + logger.error("register worker failure : " + e.getMessage(),e); + System.exit(-1); + } + } + + /** + * monitor worker + */ + private void listenerWorker(){ + PathChildrenCache workerPc = new PathChildrenCache(zkClient, getZNodeParentPath(ZKNodeType.WORKER), true, defaultThreadFactory); + try { + + Date now = new Date(); + createTime = now ; + workerPc.start(); + workerPc.getListenable().addListener(new PathChildrenCacheListener() { + @Override + public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception { + switch (event.getType()) { + case CHILD_ADDED: + logger.info("node added : {}" ,event.getData().getPath()); + break; + case CHILD_REMOVED: + String path = event.getData().getPath(); + //find myself dead + String serverHost = getHostByEventDataPath(path); + if(checkServerSelfDead(serverHost, ZKNodeType.WORKER)){ + return; + } + break; + case CHILD_UPDATED: + break; + default: + break; + } + } + }); + }catch (Exception e){ + logger.error("monitor worker failed : " + e.getMessage(),e); + } + + } + + /** + * get worker znode + * @return + */ + public String getWorkerZNode() { + return workerZNode; + } + + /** + * get worker lock path + * @return + */ + public String getWorkerLockPath(){ + return 
conf.getString(Constants.ZOOKEEPER_ESCHEDULER_LOCK_WORKERS); + } + + +} diff --git a/escheduler-server/src/main/resources/application_master.properties b/dolphinscheduler-server/src/main/resources/application_master.properties similarity index 100% rename from escheduler-server/src/main/resources/application_master.properties rename to dolphinscheduler-server/src/main/resources/application_master.properties diff --git a/escheduler-server/src/main/resources/application_worker.properties b/dolphinscheduler-server/src/main/resources/application_worker.properties similarity index 100% rename from escheduler-server/src/main/resources/application_worker.properties rename to dolphinscheduler-server/src/main/resources/application_worker.properties diff --git a/escheduler-server/src/main/resources/master.properties b/dolphinscheduler-server/src/main/resources/master.properties similarity index 100% rename from escheduler-server/src/main/resources/master.properties rename to dolphinscheduler-server/src/main/resources/master.properties diff --git a/dolphinscheduler-server/src/main/resources/master_logback.xml b/dolphinscheduler-server/src/main/resources/master_logback.xml new file mode 100644 index 0000000000..cf08b9c8ab --- /dev/null +++ b/dolphinscheduler-server/src/main/resources/master_logback.xml @@ -0,0 +1,34 @@ + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + ${log.base}/escheduler-master.log + + INFO + + + ${log.base}/escheduler-master.%d{yyyy-MM-dd_HH}.%i.log + 168 + 200MB + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + + \ No newline at end of file diff --git a/escheduler-server/src/main/resources/worker.properties b/dolphinscheduler-server/src/main/resources/worker.properties similarity index 100% rename from escheduler-server/src/main/resources/worker.properties rename to dolphinscheduler-server/src/main/resources/worker.properties diff --git 
a/dolphinscheduler-server/src/main/resources/worker_logback.xml b/dolphinscheduler-server/src/main/resources/worker_logback.xml new file mode 100644 index 0000000000..06a5231b20 --- /dev/null +++ b/dolphinscheduler-server/src/main/resources/worker_logback.xml @@ -0,0 +1,61 @@ + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + INFO + + + + taskAppId + ${log.base} + + + + ${log.base}/${taskAppId}.log + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + true + + + + + + ${log.base}/escheduler-worker.log + + INFO + + + + ${log.base}/escheduler-worker.%d{yyyy-MM-dd_HH}.%i.log + 168 + 200MB + +       + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + +    + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/AlertManagerTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/AlertManagerTest.java new file mode 100644 index 0000000000..8f5dae9a5e --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/AlertManagerTest.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master; + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import org.apache.dolphinscheduler.server.utils.AlertManager; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.List; + + +/** + * alert manager test + */ +@Ignore +public class AlertManagerTest { + + private static final Logger logger = LoggerFactory.getLogger(AlertManagerTest.class); + + @Autowired + ProcessDefinitionMapper processDefinitionMapper; + + @Autowired + ProcessInstanceMapper processInstanceMapper; + + @Autowired + TaskInstanceMapper taskInstanceMapper; + + AlertManager alertManager; + + /** + * send worker alert fault tolerance + */ + @Test + public void sendWarnningWorkerleranceFaultTest(){ + // process instance + ProcessInstance processInstance = processInstanceMapper.queryDetailById(13028); + + // set process definition + ProcessDefinition processDefinition = processDefinitionMapper.selectById(47); + processInstance.setProcessDefinition(processDefinition); + + + // fault task instance + TaskInstance toleranceTask1 = taskInstanceMapper.queryById(5038); + TaskInstance toleranceTask2 = taskInstanceMapper.queryById(5039); + + List toleranceTaskList = new ArrayList<>(2); + toleranceTaskList.add(toleranceTask1); + toleranceTaskList.add(toleranceTask2); + + 
alertManager.sendAlertWorkerToleranceFault(processInstance, toleranceTaskList); + } + + + /** + * send worker alert fault tolerance + */ + @Test + public void sendWarnningOfProcessInstanceTest(){ + // process instance + ProcessInstance processInstance = processInstanceMapper.queryDetailById(13028); + + // set process definition + ProcessDefinition processDefinition = processDefinitionMapper.selectById(47); + processInstance.setProcessDefinition(processDefinition); + + + // fault task instance + TaskInstance toleranceTask1 = taskInstanceMapper.queryById(5038); + toleranceTask1.setState(ExecutionStatus.FAILURE); + TaskInstance toleranceTask2 = taskInstanceMapper.queryById(5039); + toleranceTask2.setState(ExecutionStatus.FAILURE); + + List toleranceTaskList = new ArrayList<>(2); + toleranceTaskList.add(toleranceTask1); + toleranceTaskList.add(toleranceTask2); + + alertManager.sendAlertProcessInstance(processInstance, toleranceTaskList); + } + +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterCommandTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterCommandTest.java new file mode 100644 index 0000000000..d541f43a3b --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterCommandTest.java @@ -0,0 +1,139 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master; + +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.TaskDependType; +import org.apache.dolphinscheduler.common.enums.WarningType; +import org.apache.dolphinscheduler.common.graph.DAG; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.model.TaskNodeRelation; +import org.apache.dolphinscheduler.common.process.ProcessDag; +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.mapper.CommandMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.utils.DagHelper; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Map; + +/** + * master test + */ +@Ignore +public class MasterCommandTest { + + private final Logger logger = LoggerFactory.getLogger(MasterCommandTest.class); + + private CommandMapper commandMapper; + + private ProcessDefinitionMapper processDefinitionMapper; + + + @Test + public void StartFromFailedCommand(){ + Command cmd = new Command(); + cmd.setCommandType(CommandType.START_FAILURE_TASK_PROCESS); + cmd.setCommandParam("{\"ProcessInstanceId\":325}"); + cmd.setProcessDefinitionId(63); + + 
commandMapper.insert(cmd); + + } + + @Test + public void RecoverSuspendCommand(){ + + Command cmd = new Command(); + cmd.setProcessDefinitionId(44); + cmd.setCommandParam("{\"ProcessInstanceId\":290}"); + cmd.setCommandType(CommandType.RECOVER_SUSPENDED_PROCESS); + + commandMapper.insert(cmd); + } + + + + + @Test + public void startNewProcessCommand(){ + Command cmd = new Command(); + cmd.setCommandType(CommandType.START_PROCESS); + cmd.setProcessDefinitionId(167); + cmd.setFailureStrategy(FailureStrategy.CONTINUE); + cmd.setWarningType(WarningType.NONE); + cmd.setWarningGroupId(4); + cmd.setExecutorId(19); + + commandMapper.insert(cmd); + } + + @Test + public void ToleranceCommand(){ + Command cmd = new Command(); + cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS); + cmd.setCommandParam("{\"ProcessInstanceId\":816}"); + cmd.setProcessDefinitionId(15); + + commandMapper.insert(cmd); + } + + @Test + public void insertCommand(){ + Command cmd = new Command(); + cmd.setCommandType(CommandType.START_PROCESS); + cmd.setFailureStrategy(FailureStrategy.CONTINUE); + cmd.setWarningType(WarningType.ALL); + cmd.setProcessDefinitionId(72); + cmd.setExecutorId(10); + commandMapper.insert(cmd); + } + + + @Test + public void testDagHelper(){ + + ProcessDefinition processDefinition = processDefinitionMapper.selectById(19); + + try { + ProcessDag processDag = DagHelper.generateFlowDag(processDefinition.getProcessDefinitionJson(), + new ArrayList<>(), new ArrayList<>(), TaskDependType.TASK_POST); + + DAG dag = DagHelper.buildDagGraph(processDag); + Collection start = DagHelper.getStartVertex("1", dag, null); + + System.out.println(start.toString()); + + Map forbidden = DagHelper.getForbiddenTaskNodeMaps(processDefinition.getProcessDefinitionJson()); + System.out.println(forbidden); + } catch (Exception e) { + e.printStackTrace(); + } + + } + + + + +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ParamsTest.java 
b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ParamsTest.java new file mode 100644 index 0000000000..46c68fb044 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ParamsTest.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.master; + +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.DataType; +import org.apache.dolphinscheduler.common.enums.Direct; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import com.alibaba.fastjson.JSON; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Calendar; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + + +/** + * user define param + */ +public class ParamsTest { + + private static final Logger logger = LoggerFactory.getLogger(ParamsTest.class); + + + @Test + public void systemParamsTest()throws Exception{ + String command = "${system.biz.date}"; + + // start process + Map timeParams = BusinessTimeUtils + .getBusinessTime(CommandType.START_PROCESS, + new Date()); + + command = ParameterUtils.convertParameterPlaceholders(command, timeParams); + + logger.info("start process : {}",command); + + + Calendar calendar = Calendar.getInstance(); + calendar.setTime(new Date()); + calendar.add(Calendar.DAY_OF_MONTH, -5); + + + command = "${system.biz.date}"; + // complement data + timeParams = BusinessTimeUtils + .getBusinessTime(CommandType.COMPLEMENT_DATA, + calendar.getTime()); + command = ParameterUtils.convertParameterPlaceholders(command, timeParams); + logger.info("complement data : {}",command); + + } + + @Test + public void convertTest()throws Exception{ + Map globalParams = new HashMap<>(); + Property property = new Property(); + property.setProp("global_param"); + property.setDirect(Direct.IN); + property.setType(DataType.VARCHAR); + property.setValue("${system.biz.date}"); + globalParams.put("global_param",property); + + Map globalParamsMap = new 
HashMap<>(); + globalParamsMap.put("global_param","${system.biz.date}"); + + + Map localParams = new HashMap<>(); + Property localProperty = new Property(); + localProperty.setProp("local_param"); + localProperty.setDirect(Direct.IN); + localProperty.setType(DataType.VARCHAR); + localProperty.setValue("${global_param}"); + localParams.put("local_param", localProperty); + + Map paramsMap = ParamUtils.convert(globalParams, globalParamsMap, + localParams, CommandType.START_PROCESS, new Date()); + logger.info(JSON.toJSONString(paramsMap)); + + + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/EnvFileTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/EnvFileTest.java new file mode 100644 index 0000000000..5aceb67573 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/EnvFileTest.java @@ -0,0 +1,64 @@ +package org.apache.dolphinscheduler.server.worker; + +import org.apache.commons.lang.StringUtils; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStreamReader; + + +public class EnvFileTest { + + private static final Logger logger = LoggerFactory.getLogger(EnvFileTest.class); + + @Test + public void test() { + String path = System.getProperty("user.dir")+"/script/env/.escheduler_env.sh"; + String pythonHome = getPythonHome(path); + logger.info(pythonHome); + } + + /** + * get python home + * @param path + * @return + */ + private static String getPythonHome(String path){ + BufferedReader br = null; + String line = null; + StringBuilder sb = new StringBuilder(); + try { + br = new BufferedReader(new InputStreamReader(new FileInputStream(path))); + while ((line = br.readLine()) != null){ + if (line.contains("PYTHON_HOME")){ + sb.append(line); + break; + } + } + 
String result = sb.toString(); + if (StringUtils.isEmpty(result)){ + return null; + } + String[] arrs = result.split("="); + if (arrs.length == 2){ + return arrs[1]; + } + + }catch (IOException e){ + logger.error("read file failed : " + e.getMessage(),e); + }finally { + try { + if (br != null){ + br.close(); + } + } catch (IOException e) { + logger.error(e.getMessage(),e); + } + } + return null; + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java new file mode 100644 index 0000000000..ba7f11e8d0 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.shell; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.utils.LoggerUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.TaskManager; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import com.alibaba.fastjson.JSONObject; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Date; + +/** + * python shell command executor test + */ +@Ignore +public class ShellCommandExecutorTest { + + private static final Logger logger = LoggerFactory.getLogger(ShellCommandExecutorTest.class); + + private ProcessDao processDao = null; + + @Before + public void before(){ + processDao = DaoFactory.getDaoInstance(ProcessDao.class); + } + + @Test + public void test() throws Exception { + + TaskProps taskProps = new TaskProps(); + // processDefineId_processInstanceId_taskInstanceId + taskProps.setTaskDir("/opt/soft/program/tmp/escheduler/exec/flow/5/36/2864/7657"); + taskProps.setTaskAppId("36_2864_7657"); + // set tenant -> task execute linux user + taskProps.setTenantCode("hdfs"); + taskProps.setTaskStartTime(new Date()); + taskProps.setTaskTimeout(360000); + taskProps.setTaskInstId(7657); + + + + TaskInstance taskInstance = processDao.findTaskInstanceById(7657); + + String taskJson = taskInstance.getTaskJson(); + TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); + taskProps.setTaskParams(taskNode.getParams()); + + + // custom logger + Logger taskLogger = 
LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, + taskInstance.getProcessDefinitionId(), + taskInstance.getProcessInstanceId(), + taskInstance.getId())); + + + AbstractTask task = TaskManager.newTask(taskInstance.getTaskType(), taskProps, taskLogger); + + logger.info("task info : {}", task); + + // job init + task.init(); + + // job handle + task.handle(); + ExecutionStatus status = ExecutionStatus.SUCCESS; + + if (task.getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){ + status = ExecutionStatus.SUCCESS; + }else if (task.getExitStatusCode() == Constants.EXIT_CODE_KILL){ + status = ExecutionStatus.KILL; + }else { + status = ExecutionStatus.FAILURE; + } + + logger.info(status.toString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java new file mode 100644 index 0000000000..15da884c98 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java @@ -0,0 +1,146 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.sql; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.utils.LoggerUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.TaskManager; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import com.alibaba.fastjson.JSONObject; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Date; + +/** + * python shell command executor test + */ +@Ignore +public class SqlExecutorTest { + + private static final Logger logger = LoggerFactory.getLogger(SqlExecutorTest.class); + + private ProcessDao processDao = null; + + @Before + public void before(){ + processDao = DaoFactory.getDaoInstance(ProcessDao.class); + } + + @Test + public void test() throws Exception { + String nodeName = "mysql sql test"; + String taskAppId = "51_11282_263978"; + String tenantCode = "hdfs"; + int taskInstId = 263978; + sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId); + } + + @Test + public void testClickhouse() throws Exception { + String nodeName = "ClickHouse sql test"; + String taskAppId = "1_11_20"; + String tenantCode = "default"; + int taskInstId = 20; + sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId); + } + + @Test + public void testOracle() throws Exception { + String nodeName = "oracle sql test"; + String taskAppId = "2_13_25"; + String tenantCode = "demo"; + int taskInstId = 25; + sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId); + } + + @Test + public void 
testSQLServer() throws Exception { + String nodeName = "SQL Server sql test"; + String taskAppId = "3_14_27"; + String tenantCode = "demo"; + int taskInstId = 27; + sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId); + } + + /** + * Basic test template for SQLTasks, mainly test different types of DBMS types + * @param nodeName node name for selected task + * @param taskAppId task app id + * @param tenantCode tenant code + * @param taskInstId task instance id + * @throws Exception + */ + private void sharedTestSqlTask(String nodeName, String taskAppId, String tenantCode, int taskInstId) throws Exception { + TaskProps taskProps = new TaskProps(); + taskProps.setTaskDir(""); + // processDefineId_processInstanceId_taskInstanceId + taskProps.setTaskAppId(taskAppId); + // set tenant -> task execute linux user + taskProps.setTenantCode(tenantCode); + taskProps.setTaskStartTime(new Date()); + taskProps.setTaskTimeout(360000); + taskProps.setTaskInstId(taskInstId); + taskProps.setNodeName(nodeName); + + + + TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); + + String taskJson = taskInstance.getTaskJson(); + TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); + taskProps.setTaskParams(taskNode.getParams()); + + + // custom logger + Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, + taskInstance.getProcessDefinitionId(), + taskInstance.getProcessInstanceId(), + taskInstance.getId())); + + + AbstractTask task = TaskManager.newTask(taskInstance.getTaskType(), taskProps, taskLogger); + + logger.info("task info : {}", task); + + // job init + task.init(); + + // job handle + task.handle(); + ExecutionStatus status = ExecutionStatus.SUCCESS; + + if (task.getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){ + status = ExecutionStatus.SUCCESS; + }else if (task.getExitStatusCode() == Constants.EXIT_CODE_KILL){ + status = ExecutionStatus.KILL; + }else { + status = 
ExecutionStatus.FAILURE; + } + + logger.info(status.toString()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTaskTest.java new file mode 100644 index 0000000000..3d428eab89 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTaskTest.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.dependent; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DependentTaskTest { + + private static final Logger logger = LoggerFactory.getLogger(DependentTaskTest.class); + + + @Test + public void testDependInit(){ + + TaskProps taskProps = new TaskProps(); + + String dependString = "{\n" + + "\"dependTaskList\":[\n" + + " {\n" + + " \"dependItemList\":[\n" + + " {\n" + + " \"definitionId\": 101,\n" + + " \"depTasks\": \"ALL\",\n" + + " \"cycle\": \"day\",\n" + + " \"dateValue\": \"last1Day\"\n" + + " }\n" + + " ],\n" + + " \"relation\": \"AND\"\n" + + " }\n" + + " ],\n" + + "\"relation\":\"OR\"\n" + + "}"; + + taskProps.setTaskInstId(252612); + taskProps.setDependence(dependString); + DependentTask dependentTask = new DependentTask(taskProps, logger); + dependentTask.init(); + dependentTask.handle(); + Assert.assertEquals(dependentTask.getExitStatusCode(), Constants.EXIT_CODE_FAILURE ); + } + + + +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/StandaloneZKServerForTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/StandaloneZKServerForTest.java new file mode 100644 index 0000000000..2c5d776bd5 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/StandaloneZKServerForTest.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.zk; + +import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors; +import org.apache.zookeeper.server.ServerConfig; +import org.apache.zookeeper.server.ZooKeeperServerMain; +import org.apache.zookeeper.server.quorum.QuorumPeerConfig; +import org.junit.Before; +import org.junit.Ignore; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.util.Properties; + + +/** + * just for test + */ +@Ignore +public class StandaloneZKServerForTest { + + private static final Logger logger = LoggerFactory.getLogger(StandaloneZKServerForTest.class); + + private static volatile ZooKeeperServerMain zkServer = null; + + + @Before + public void before() { + logger.info("standalone zookeeper server for test service start "); + + ThreadPoolExecutors.getInstance().execute(new Runnable() { + @Override + public void run() { + + //delete zk data dir ? 
+ File zkFile = new File(System.getProperty("java.io.tmpdir"), "zookeeper"); +// if(zkFile.exists()){ +// zkFile.delete(); +// } + startStandaloneServer("2000", zkFile.getAbsolutePath(), "2181", "10", "5"); + } + }); + + } + + + /** + * start zk server + * @param tickTime zookeeper ticktime + * @param dataDir zookeeper data dir + * @param clientPort zookeeper client port + * @param initLimit zookeeper init limit + * @param syncLimit zookeeper sync limit + */ + private void startStandaloneServer(String tickTime, String dataDir, String clientPort, String initLimit, String syncLimit) { + Properties props = new Properties(); + props.setProperty("tickTime", tickTime); + props.setProperty("dataDir", dataDir); + props.setProperty("clientPort", clientPort); + props.setProperty("initLimit", initLimit); + props.setProperty("syncLimit", syncLimit); + + QuorumPeerConfig quorumConfig = new QuorumPeerConfig(); + try { + quorumConfig.parseProperties(props); + + if(zkServer == null ){ + + synchronized (StandaloneZKServerForTest.class){ + if(zkServer == null ){ + zkServer = new ZooKeeperServerMain(); + final ServerConfig config = new ServerConfig(); + config.readFrom(quorumConfig); + zkServer.runFromConfig(config); + } + } + + } + + } catch (Exception e) { + logger.error("start standalone server fail!", e); + } + } + + +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClientTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClientTest.java new file mode 100644 index 0000000000..caac3bdb14 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClientTest.java @@ -0,0 +1,33 @@ +package org.apache.dolphinscheduler.server.zk; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.List; + +/** + * + */ +public class ZKWorkerClientTest { + + @Test + public void 
getZKWorkerClient() throws Exception { + + +// ZKWorkerClient zkWorkerClient = ZKWorkerClient.getZKWorkerClient(); +// zkWorkerClient.removeDeadServerByHost("127.0.0.1", Constants.WORKER_PREFIX); + + + } + + @Test + public void test(){ + String ips = ""; + + List ipList = Arrays.asList(ips.split(",")); + + + Assert.assertEquals(1, ipList.size()); + } +} \ No newline at end of file diff --git a/escheduler-server/src/test/resources/dao/data_source.properties b/dolphinscheduler-server/src/test/resources/dao/data_source.properties similarity index 100% rename from escheduler-server/src/test/resources/dao/data_source.properties rename to dolphinscheduler-server/src/test/resources/dao/data_source.properties diff --git a/escheduler-ui/.babelrc b/dolphinscheduler-ui/.babelrc similarity index 100% rename from escheduler-ui/.babelrc rename to dolphinscheduler-ui/.babelrc diff --git a/escheduler-ui/.editorconfig b/dolphinscheduler-ui/.editorconfig similarity index 100% rename from escheduler-ui/.editorconfig rename to dolphinscheduler-ui/.editorconfig diff --git a/escheduler-ui/.env b/dolphinscheduler-ui/.env similarity index 100% rename from escheduler-ui/.env rename to dolphinscheduler-ui/.env diff --git a/escheduler-ui/.eslintrc b/dolphinscheduler-ui/.eslintrc similarity index 100% rename from escheduler-ui/.eslintrc rename to dolphinscheduler-ui/.eslintrc diff --git a/escheduler-ui/build/combo.js b/dolphinscheduler-ui/build/combo.js similarity index 100% rename from escheduler-ui/build/combo.js rename to dolphinscheduler-ui/build/combo.js diff --git a/escheduler-ui/build/config.js b/dolphinscheduler-ui/build/config.js similarity index 100% rename from escheduler-ui/build/config.js rename to dolphinscheduler-ui/build/config.js diff --git a/escheduler-ui/build/webpack.config.combined.js b/dolphinscheduler-ui/build/webpack.config.combined.js similarity index 100% rename from escheduler-ui/build/webpack.config.combined.js rename to 
dolphinscheduler-ui/build/webpack.config.combined.js diff --git a/escheduler-ui/build/webpack.config.dev.js b/dolphinscheduler-ui/build/webpack.config.dev.js similarity index 100% rename from escheduler-ui/build/webpack.config.dev.js rename to dolphinscheduler-ui/build/webpack.config.dev.js diff --git a/escheduler-ui/build/webpack.config.prod.js b/dolphinscheduler-ui/build/webpack.config.prod.js similarity index 100% rename from escheduler-ui/build/webpack.config.prod.js rename to dolphinscheduler-ui/build/webpack.config.prod.js diff --git a/escheduler-ui/build/webpack.config.test.js b/dolphinscheduler-ui/build/webpack.config.test.js similarity index 100% rename from escheduler-ui/build/webpack.config.test.js rename to dolphinscheduler-ui/build/webpack.config.test.js diff --git a/dolphinscheduler-ui/install-escheduler-ui.sh b/dolphinscheduler-ui/install-escheduler-ui.sh new file mode 100644 index 0000000000..ad38e0e0ee --- /dev/null +++ b/dolphinscheduler-ui/install-escheduler-ui.sh @@ -0,0 +1,219 @@ +#!/bin/bash +# current path +esc_basepath=$(cd `dirname $0`; pwd) + +menu(){ + cat <> /etc/nginx/conf.d/dolphinscheduler.conf + +} + +ubuntu(){ + # update source + apt-get update + + # install nginx + apt-get install -y nginx + + # config nginx + dolphinschedulerConf $1 $2 + + # startup nginx + /etc/init.d/nginx start + sleep 1 + if [ $? -ne 0 ];then + /etc/init.d/nginx start + fi + nginx -s reload +} + +centos7(){ + + rpm -Uvh http://nginx.org/packages/centos/7/noarch/RPMS/nginx-release-centos-7-0.el7.ngx.noarch.rpm + yum install -y nginx + + # config nginx + dolphinschedulerConf $1 $2 + + # solve 0.0.0.0:8888 problem + yum -y install policycoreutils-python + semanage port -a -t http_port_t -p tcp $esc_proxy + + # open front access port + firewall-cmd --zone=public --add-port=$esc_proxy/tcp --permanent + + # startup nginx + systemctl start nginx + sleep 1 + if [ $? 
-ne 0 ];then + systemctl start nginx + fi + nginx -s reload + + # set SELinux parameters + sed -i "s/SELINUX=enforcing/SELINUX=disabled/g" /etc/selinux/config + # temporary effect + setenforce 0 + +} + + +centos6(){ + + rpm -ivh http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm + + # install nginx + yum install nginx -y + + # config nginx + dolphinschedulerConf $1 $2 + + # startup nginx + /etc/init.d/nginx start + sleep 1 + if [ $? -ne 0 ];then + /etc/init.d/nginx start + fi + nginx -s reload + + # set SELinux parameters + sed -i "s/SELINUX=enforcing/SELINUX=disabled/g" /etc/selinux/config + + # temporary effect + setenforce 0 + +} + +function main(){ + echo "Welcome to thedolphinscheduler front-end deployment script, which is currently only supported by front-end deployment scripts : CentOS and Ubuntu" + echo "Please execute in the dolphinscheduler-ui directory" + + #To be compatible with MacOS and Linux + if [[ "$OSTYPE" == "darwin"* ]]; then + # Mac OSX + echo "Easy Scheduler ui install not support Mac OSX operating system" + exit 1 + elif [[ "$OSTYPE" == "linux-gnu" ]]; then + # linux + echo "linux" + elif [[ "$OSTYPE" == "cygwin" ]]; then + # POSIX compatibility layer and Linux environment emulation for Windows + echo "Easy Scheduler ui not support Windows operating system" + exit 1 + elif [[ "$OSTYPE" == "msys" ]]; then + # Lightweight shell and GNU utilities compiled for Windows (part of MinGW) + echo "Easy Scheduler ui not support Windows operating system" + exit 1 + elif [[ "$OSTYPE" == "win32" ]]; then + echo "Easy Scheduler ui not support Windows operating system" + exit 1 + elif [[ "$OSTYPE" == "freebsd"* ]]; then + # ... + echo "freebsd" + else + # Unknown. 
+ echo "Operating system unknown, please tell us(submit issue) for better service" + exit 1 + fi + + + # config front-end access ports + read -p "Please enter the nginx proxy port, do not enter, the default is 8888 :" esc_proxy_port + if [ -z "${esc_proxy_port}" ];then + esc_proxy_port="8888" + fi + + read -p "Please enter the api server proxy ip, you must enter, for example: 192.168.xx.xx :" esc_api_server_ip + if [ -z "${esc_api_server_ip}" ];then + echo "api server proxy ip can not be empty." + exit 1 + fi + + read -p "Please enter the api server proxy port, do not enter, the default is 12345:" esc_api_server_port + if [ -z "${esc_api_server_port}" ];then + esc_api_server_port="12345" + fi + + # api server backend address + esc_api_server="http://$esc_api_server_ip:$esc_api_server_port" + + # local ip address + esc_ipaddr=$(ip a | grep inet | grep -v inet6 | grep -v 127 | sed 's/^[ \t]*//g' | cut -d ' ' -f2 | head -n 1 | awk -F '/' '{print $1}') + + # Prompt message + menu + + read -p "Please enter the installation number(1|2|3|4):" num + + case $num in + 1) + centos6 ${esc_proxy_port} ${esc_api_server} + ;; + 2) + centos7 ${esc_proxy_port} ${esc_api_server} + ;; + 3) + ubuntu ${esc_proxy_port} ${esc_api_server} + ;; + 4) + echo $"Usage :sh $0" + exit 1 + ;; + *) + echo $"Usage :sh $0" + exit 1 + esac + echo "Please visit the browser:http://${esc_ipaddr}:${esc_proxy_port}" + +} + +main diff --git a/escheduler-ui/package.json b/dolphinscheduler-ui/package.json similarity index 100% rename from escheduler-ui/package.json rename to dolphinscheduler-ui/package.json diff --git a/escheduler-ui/src/combo/1.0.0/3rd.css b/dolphinscheduler-ui/src/combo/1.0.0/3rd.css similarity index 100% rename from escheduler-ui/src/combo/1.0.0/3rd.css rename to dolphinscheduler-ui/src/combo/1.0.0/3rd.css diff --git a/escheduler-ui/src/combo/1.0.0/3rd.js b/dolphinscheduler-ui/src/combo/1.0.0/3rd.js similarity index 100% rename from escheduler-ui/src/combo/1.0.0/3rd.js rename to 
dolphinscheduler-ui/src/combo/1.0.0/3rd.js diff --git a/escheduler-ui/src/combo/1.0.0/base.css b/dolphinscheduler-ui/src/combo/1.0.0/base.css similarity index 100% rename from escheduler-ui/src/combo/1.0.0/base.css rename to dolphinscheduler-ui/src/combo/1.0.0/base.css diff --git a/escheduler-ui/src/combo/1.0.0/es5.js b/dolphinscheduler-ui/src/combo/1.0.0/es5.js similarity index 100% rename from escheduler-ui/src/combo/1.0.0/es5.js rename to dolphinscheduler-ui/src/combo/1.0.0/es5.js diff --git a/escheduler-ui/src/combo/1.0.0/local.js b/dolphinscheduler-ui/src/combo/1.0.0/local.js similarity index 100% rename from escheduler-ui/src/combo/1.0.0/local.js rename to dolphinscheduler-ui/src/combo/1.0.0/local.js diff --git a/escheduler-ui/src/font/awesome/FontAwesome.otf b/dolphinscheduler-ui/src/font/awesome/FontAwesome.otf similarity index 100% rename from escheduler-ui/src/font/awesome/FontAwesome.otf rename to dolphinscheduler-ui/src/font/awesome/FontAwesome.otf diff --git a/escheduler-ui/src/font/awesome/font-awesome.css b/dolphinscheduler-ui/src/font/awesome/font-awesome.css similarity index 100% rename from escheduler-ui/src/font/awesome/font-awesome.css rename to dolphinscheduler-ui/src/font/awesome/font-awesome.css diff --git a/escheduler-ui/src/font/awesome/fontawesome-webfont.eot b/dolphinscheduler-ui/src/font/awesome/fontawesome-webfont.eot similarity index 100% rename from escheduler-ui/src/font/awesome/fontawesome-webfont.eot rename to dolphinscheduler-ui/src/font/awesome/fontawesome-webfont.eot diff --git a/escheduler-ui/src/font/awesome/fontawesome-webfont.svg b/dolphinscheduler-ui/src/font/awesome/fontawesome-webfont.svg similarity index 100% rename from escheduler-ui/src/font/awesome/fontawesome-webfont.svg rename to dolphinscheduler-ui/src/font/awesome/fontawesome-webfont.svg diff --git a/escheduler-ui/src/font/awesome/fontawesome-webfont.ttf b/dolphinscheduler-ui/src/font/awesome/fontawesome-webfont.ttf similarity index 100% rename from 
escheduler-ui/src/font/awesome/fontawesome-webfont.ttf rename to dolphinscheduler-ui/src/font/awesome/fontawesome-webfont.ttf diff --git a/escheduler-ui/src/font/awesome/fontawesome-webfont.woff b/dolphinscheduler-ui/src/font/awesome/fontawesome-webfont.woff similarity index 100% rename from escheduler-ui/src/font/awesome/fontawesome-webfont.woff rename to dolphinscheduler-ui/src/font/awesome/fontawesome-webfont.woff diff --git a/escheduler-ui/src/font/awesome/fontawesome-webfont.woff2 b/dolphinscheduler-ui/src/font/awesome/fontawesome-webfont.woff2 similarity index 100% rename from escheduler-ui/src/font/awesome/fontawesome-webfont.woff2 rename to dolphinscheduler-ui/src/font/awesome/fontawesome-webfont.woff2 diff --git a/escheduler-ui/src/font/demo.css b/dolphinscheduler-ui/src/font/demo.css similarity index 100% rename from escheduler-ui/src/font/demo.css rename to dolphinscheduler-ui/src/font/demo.css diff --git a/escheduler-ui/src/font/demo_index.html b/dolphinscheduler-ui/src/font/demo_index.html similarity index 100% rename from escheduler-ui/src/font/demo_index.html rename to dolphinscheduler-ui/src/font/demo_index.html diff --git a/escheduler-ui/src/font/iconfont.css b/dolphinscheduler-ui/src/font/iconfont.css similarity index 100% rename from escheduler-ui/src/font/iconfont.css rename to dolphinscheduler-ui/src/font/iconfont.css diff --git a/escheduler-ui/src/font/iconfont.eot b/dolphinscheduler-ui/src/font/iconfont.eot similarity index 100% rename from escheduler-ui/src/font/iconfont.eot rename to dolphinscheduler-ui/src/font/iconfont.eot diff --git a/escheduler-ui/src/font/iconfont.js b/dolphinscheduler-ui/src/font/iconfont.js similarity index 100% rename from escheduler-ui/src/font/iconfont.js rename to dolphinscheduler-ui/src/font/iconfont.js diff --git a/escheduler-ui/src/font/iconfont.svg b/dolphinscheduler-ui/src/font/iconfont.svg similarity index 100% rename from escheduler-ui/src/font/iconfont.svg rename to 
dolphinscheduler-ui/src/font/iconfont.svg diff --git a/escheduler-ui/src/font/iconfont.ttf b/dolphinscheduler-ui/src/font/iconfont.ttf similarity index 100% rename from escheduler-ui/src/font/iconfont.ttf rename to dolphinscheduler-ui/src/font/iconfont.ttf diff --git a/escheduler-ui/src/font/iconfont.woff b/dolphinscheduler-ui/src/font/iconfont.woff similarity index 100% rename from escheduler-ui/src/font/iconfont.woff rename to dolphinscheduler-ui/src/font/iconfont.woff diff --git a/escheduler-ui/src/font/iconfont.woff2 b/dolphinscheduler-ui/src/font/iconfont.woff2 similarity index 100% rename from escheduler-ui/src/font/iconfont.woff2 rename to dolphinscheduler-ui/src/font/iconfont.woff2 diff --git a/escheduler-ui/src/images/favicon.ico b/dolphinscheduler-ui/src/images/favicon.ico similarity index 100% rename from escheduler-ui/src/images/favicon.ico rename to dolphinscheduler-ui/src/images/favicon.ico diff --git a/escheduler-ui/src/js/conf/home/App.vue b/dolphinscheduler-ui/src/js/conf/home/App.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/App.vue rename to dolphinscheduler-ui/src/js/conf/home/App.vue diff --git a/escheduler-ui/src/js/conf/home/index.js b/dolphinscheduler-ui/src/js/conf/home/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/index.js rename to dolphinscheduler-ui/src/js/conf/home/index.js diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/config.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/config.js rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/dag.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/dag.js rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.js diff --git 
a/escheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/selectInput.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/selectInput.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/selectInput.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/selectInput.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/timeoutAlarm.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/timeoutAlarm.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/timeoutAlarm.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/timeoutAlarm.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/workerGroups.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/workerGroups.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/workerGroups.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/workerGroups.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.scss similarity index 100% 
rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.scss rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.scss diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/log.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/log.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/log.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/log.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/commcon.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/commcon.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/commcon.js rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/commcon.js diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/dependItemList.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/dependItemList.vue similarity index 100% rename from 
escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/dependItemList.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/dependItemList.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/httpParams.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/httpParams.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/httpParams.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/httpParams.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/listBox.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/listBox.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/listBox.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/listBox.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/localParams.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/localParams.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/localParams.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/localParams.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/resources.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/resources.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/resources.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/resources.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/sqlType.vue 
b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/sqlType.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/sqlType.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/sqlType.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/statementList.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/statementList.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/statementList.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/statementList.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/udfs.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/udfs.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/udfs.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/udfs.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/dependent.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/dependent.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/dependent.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/dependent.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/flink.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/flink.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/flink.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/flink.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/http.vue 
b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/http.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/http.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/http.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/procedure.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/procedure.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/procedure.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/procedure.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue similarity index 100% rename from 
escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sql.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sql.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sql.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sql.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sub_process.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sub_process.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sub_process.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sub_process.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/jumpAffirm.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/jumpAffirm.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/jumpAffirm.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/jumpAffirm.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/downChart.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/downChart.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/downChart.js rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/downChart.js diff --git 
a/escheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/dragZoom.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/dragZoom.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/dragZoom.js rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/dragZoom.js diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/startingParam/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/startingParam/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/startingParam/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/startingParam/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/udp/_source/selectTenant.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/_source/selectTenant.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/udp/_source/selectTenant.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/_source/selectTenant.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue similarity index 100% rename from 
escheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/variable/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/variable/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/variable/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/variable/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/_source/variable/variablesView.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/variable/variablesView.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/_source/variable/variablesView.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/variable/variablesView.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/definitionDetails.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/definitionDetails.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/definitionDetails.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/definitionDetails.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/dag_bg.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/dag_bg.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/img/dag_bg.png rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/dag_bg.png diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/toobar_HTTP.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toobar_HTTP.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/img/toobar_HTTP.png rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toobar_HTTP.png diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/toobar_flink.svg b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toobar_flink.svg similarity index 100% rename from 
escheduler-ui/src/js/conf/home/pages/dag/img/toobar_flink.svg rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toobar_flink.svg diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_DEPENDENT.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_DEPENDENT.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_DEPENDENT.png rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_DEPENDENT.png diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_MR.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_MR.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_MR.png rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_MR.png diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_PROCEDURE.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_PROCEDURE.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_PROCEDURE.png rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_PROCEDURE.png diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_PYTHON.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_PYTHON.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_PYTHON.png rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_PYTHON.png diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SHELL.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SHELL.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SHELL.png rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SHELL.png diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SPARK.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SPARK.png similarity index 100% rename from 
escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SPARK.png rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SPARK.png diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SQL.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SQL.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SQL.png rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SQL.png diff --git a/escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SUB_PROCESS.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SUB_PROCESS.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SUB_PROCESS.png rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SUB_PROCESS.png diff --git a/escheduler-ui/src/js/conf/home/pages/dag/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/dag/instanceDetails.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/instanceDetails.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/dag/instanceDetails.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/instanceDetails.vue diff --git a/escheduler-ui/src/js/conf/home/pages/datasource/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/datasource/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/datasource/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/datasource/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue b/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue similarity index 100% rename from 
escheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue diff --git a/escheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/datasource/pages/list/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/datasource/pages/list/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/home/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/home/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/home/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/home/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/gauge.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/gauge.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/gauge.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/gauge.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/gaugeOption.js 
b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/gaugeOption.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/gaugeOption.js rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/gaugeOption.js diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/zookeeperList.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/zookeeperList.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/zookeeperList.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/_source/zookeeperList.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/alert.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/alert.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/alert.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/alert.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/apiserver.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/apiserver.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/apiserver.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/apiserver.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/master.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/master.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/master.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/master.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/mysql.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/mysql.vue similarity index 100% rename from 
escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/mysql.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/mysql.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/rpcserver.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/rpcserver.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/rpcserver.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/rpcserver.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/servers.scss b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/servers.scss similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/servers.scss rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/servers.scss diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/statistics.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/statistics.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/statistics.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/statistics.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/worker.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/worker.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/worker.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/worker.vue diff --git a/escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/zookeeper.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/zookeeper.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/monitor/pages/servers/zookeeper.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/zookeeper.vue diff --git 
a/escheduler-ui/src/js/conf/home/pages/projects/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/common.js b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/common.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/common.js rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/common.js diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/_source/conditions.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/_source/conditions.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/_source/conditions.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/_source/conditions.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/_source/list.vue rename to 
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/taskRecordList/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/create/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/create/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/create/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/create/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/details/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/details/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/details/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/details/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/email.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/email.vue similarity index 100% rename from 
escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/email.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/email.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/start.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/start.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/start.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/start.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/timing.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/timing.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/timing.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/timing.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/util.js b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/util.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/util.js rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/util.js diff --git 
a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/_source/tree.js b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/_source/tree.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/_source/tree.js rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/_source/tree.js diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/_source/util.js b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/_source/util.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/_source/util.js rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/_source/util.js diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/img/dag_bg.png b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/img/dag_bg.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/img/dag_bg.png rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/img/dag_bg.png diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/index.vue rename 
to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/tree/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/timing/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/timing/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/timing/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/timing/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/definition/timing/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/timing/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/definition/timing/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/timing/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/historyTaskRecord/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/historyTaskRecord/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/historyTaskRecord/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/historyTaskRecord/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/chartConfig.js b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/chartConfig.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/chartConfig.js rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/chartConfig.js diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/commandStateCount.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/commandStateCount.vue similarity index 100% rename from 
escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/commandStateCount.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/commandStateCount.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/defineUserCount.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/defineUserCount.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/defineUserCount.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/defineUserCount.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/processStateCount.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/processStateCount.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/processStateCount.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/processStateCount.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/queueCount.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/queueCount.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/queueCount.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/queueCount.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/taskCtatusCount.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/taskCtatusCount.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/taskCtatusCount.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/taskCtatusCount.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/index/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/index.vue 
similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/index/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/instance/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/instance/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/details/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/details/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/details/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/details/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/_source/gantt.js b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/_source/gantt.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/_source/gantt.js rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/_source/gantt.js diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/img/dag_bg.png b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/img/dag_bg.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/img/dag_bg.png rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/img/dag_bg.png diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/index.vue 
similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/gantt/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/createProject.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/createProject.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/createProject.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/createProject.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/list/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/list/index.vue similarity index 100% rename 
from escheduler-ui/src/js/conf/home/pages/projects/pages/list/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/list/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/taskRecord/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskRecord/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/taskRecord/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskRecord/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/timing/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/timing/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/projects/pages/timing/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/timing/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/index.vue diff --git 
a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/_source/codemirror.js b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/_source/codemirror.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/_source/codemirror.js rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/_source/codemirror.js diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/_source/common.js b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/_source/common.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/_source/common.js rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/_source/common.js diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/create/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/create/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/create/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/create/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/down_error.png b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/down_error.png similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/down_error.png rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/down_error.png diff --git 
a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/noType.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/noType.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/noType.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/noType.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/utils.js b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/utils.js similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/utils.js rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/_source/utils.js diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/details/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/edit/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/edit/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/edit/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/edit/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue rename to 
dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/rename.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/rename.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/rename.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/rename.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/udf/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/udf/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/createUdf.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/createUdf.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/createUdf.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/createUdf.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/list.vue rename to 
dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/rename.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/rename.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/rename.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/rename.vue diff --git a/escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/index.vue rename to 
dolphinscheduler-ui/src/js/conf/home/pages/security/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/queue/_source/createQueue.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/queue/_source/createQueue.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/queue/_source/createQueue.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/queue/_source/createQueue.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/queue/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/queue/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/queue/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/queue/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/queue/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/queue/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/queue/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/queue/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/createTenement.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/createTenement.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/createTenement.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/createTenement.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/list.vue diff --git 
a/escheduler-ui/src/js/conf/home/pages/security/pages/tenement/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/tenement/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/tenement/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/tenement/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/token/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/token/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/token/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/token/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/users/_source/createUser.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/users/_source/createUser.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/users/_source/createUser.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/users/_source/createUser.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/users/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/users/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/users/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/users/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/users/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/users/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/users/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/users/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/_source/createWarning.vue 
b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/_source/createWarning.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/_source/createWarning.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/_source/createWarning.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/_source/createWorker.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/_source/createWorker.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/_source/createWorker.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/_source/createWorker.vue diff --git a/escheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/_source/list.vue diff --git 
a/escheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/user/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/user/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/user/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/user/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/user/pages/account/_source/info.vue b/dolphinscheduler-ui/src/js/conf/home/pages/user/pages/account/_source/info.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/user/pages/account/_source/info.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/user/pages/account/_source/info.vue diff --git a/escheduler-ui/src/js/conf/home/pages/user/pages/account/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/user/pages/account/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/user/pages/account/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/user/pages/account/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/user/pages/password/_source/info.vue b/dolphinscheduler-ui/src/js/conf/home/pages/user/pages/password/_source/info.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/user/pages/password/_source/info.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/user/pages/password/_source/info.vue diff --git a/escheduler-ui/src/js/conf/home/pages/user/pages/password/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/user/pages/password/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/user/pages/password/index.vue rename to 
dolphinscheduler-ui/src/js/conf/home/pages/user/pages/password/index.vue diff --git a/escheduler-ui/src/js/conf/home/pages/user/pages/token/_source/createToken.vue b/dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/_source/createToken.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/user/pages/token/_source/createToken.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/_source/createToken.vue diff --git a/escheduler-ui/src/js/conf/home/pages/user/pages/token/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/_source/list.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/user/pages/token/_source/list.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/_source/list.vue diff --git a/escheduler-ui/src/js/conf/home/pages/user/pages/token/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/index.vue similarity index 100% rename from escheduler-ui/src/js/conf/home/pages/user/pages/token/index.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/index.vue diff --git a/escheduler-ui/src/js/conf/home/router/index.js b/dolphinscheduler-ui/src/js/conf/home/router/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/router/index.js rename to dolphinscheduler-ui/src/js/conf/home/router/index.js diff --git a/escheduler-ui/src/js/conf/home/store/dag/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/dag/actions.js rename to dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js diff --git a/escheduler-ui/src/js/conf/home/store/dag/getters.js b/dolphinscheduler-ui/src/js/conf/home/store/dag/getters.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/dag/getters.js rename to dolphinscheduler-ui/src/js/conf/home/store/dag/getters.js diff --git 
a/escheduler-ui/src/js/conf/home/store/dag/index.js b/dolphinscheduler-ui/src/js/conf/home/store/dag/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/dag/index.js rename to dolphinscheduler-ui/src/js/conf/home/store/dag/index.js diff --git a/escheduler-ui/src/js/conf/home/store/dag/mutations.js b/dolphinscheduler-ui/src/js/conf/home/store/dag/mutations.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/dag/mutations.js rename to dolphinscheduler-ui/src/js/conf/home/store/dag/mutations.js diff --git a/escheduler-ui/src/js/conf/home/store/dag/state.js b/dolphinscheduler-ui/src/js/conf/home/store/dag/state.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/dag/state.js rename to dolphinscheduler-ui/src/js/conf/home/store/dag/state.js diff --git a/escheduler-ui/src/js/conf/home/store/datasource/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/datasource/actions.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/datasource/actions.js rename to dolphinscheduler-ui/src/js/conf/home/store/datasource/actions.js diff --git a/escheduler-ui/src/js/conf/home/store/datasource/getters.js b/dolphinscheduler-ui/src/js/conf/home/store/datasource/getters.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/datasource/getters.js rename to dolphinscheduler-ui/src/js/conf/home/store/datasource/getters.js diff --git a/escheduler-ui/src/js/conf/home/store/datasource/index.js b/dolphinscheduler-ui/src/js/conf/home/store/datasource/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/datasource/index.js rename to dolphinscheduler-ui/src/js/conf/home/store/datasource/index.js diff --git a/escheduler-ui/src/js/conf/home/store/datasource/mutations.js b/dolphinscheduler-ui/src/js/conf/home/store/datasource/mutations.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/datasource/mutations.js rename to 
dolphinscheduler-ui/src/js/conf/home/store/datasource/mutations.js diff --git a/escheduler-ui/src/js/conf/home/store/datasource/state.js b/dolphinscheduler-ui/src/js/conf/home/store/datasource/state.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/datasource/state.js rename to dolphinscheduler-ui/src/js/conf/home/store/datasource/state.js diff --git a/escheduler-ui/src/js/conf/home/store/index.js b/dolphinscheduler-ui/src/js/conf/home/store/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/index.js rename to dolphinscheduler-ui/src/js/conf/home/store/index.js diff --git a/escheduler-ui/src/js/conf/home/store/monitor/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/monitor/actions.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/monitor/actions.js rename to dolphinscheduler-ui/src/js/conf/home/store/monitor/actions.js diff --git a/escheduler-ui/src/js/conf/home/store/monitor/getters.js b/dolphinscheduler-ui/src/js/conf/home/store/monitor/getters.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/monitor/getters.js rename to dolphinscheduler-ui/src/js/conf/home/store/monitor/getters.js diff --git a/escheduler-ui/src/js/conf/home/store/monitor/index.js b/dolphinscheduler-ui/src/js/conf/home/store/monitor/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/monitor/index.js rename to dolphinscheduler-ui/src/js/conf/home/store/monitor/index.js diff --git a/escheduler-ui/src/js/conf/home/store/monitor/mutations.js b/dolphinscheduler-ui/src/js/conf/home/store/monitor/mutations.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/monitor/mutations.js rename to dolphinscheduler-ui/src/js/conf/home/store/monitor/mutations.js diff --git a/escheduler-ui/src/js/conf/home/store/monitor/state.js b/dolphinscheduler-ui/src/js/conf/home/store/monitor/state.js similarity index 100% rename from 
escheduler-ui/src/js/conf/home/store/monitor/state.js rename to dolphinscheduler-ui/src/js/conf/home/store/monitor/state.js diff --git a/escheduler-ui/src/js/conf/home/store/projects/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/projects/actions.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/projects/actions.js rename to dolphinscheduler-ui/src/js/conf/home/store/projects/actions.js diff --git a/escheduler-ui/src/js/conf/home/store/projects/getters.js b/dolphinscheduler-ui/src/js/conf/home/store/projects/getters.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/projects/getters.js rename to dolphinscheduler-ui/src/js/conf/home/store/projects/getters.js diff --git a/escheduler-ui/src/js/conf/home/store/projects/index.js b/dolphinscheduler-ui/src/js/conf/home/store/projects/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/projects/index.js rename to dolphinscheduler-ui/src/js/conf/home/store/projects/index.js diff --git a/escheduler-ui/src/js/conf/home/store/projects/mutations.js b/dolphinscheduler-ui/src/js/conf/home/store/projects/mutations.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/projects/mutations.js rename to dolphinscheduler-ui/src/js/conf/home/store/projects/mutations.js diff --git a/escheduler-ui/src/js/conf/home/store/projects/state.js b/dolphinscheduler-ui/src/js/conf/home/store/projects/state.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/projects/state.js rename to dolphinscheduler-ui/src/js/conf/home/store/projects/state.js diff --git a/escheduler-ui/src/js/conf/home/store/resource/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/resource/actions.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/resource/actions.js rename to dolphinscheduler-ui/src/js/conf/home/store/resource/actions.js diff --git a/escheduler-ui/src/js/conf/home/store/resource/getters.js 
b/dolphinscheduler-ui/src/js/conf/home/store/resource/getters.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/resource/getters.js rename to dolphinscheduler-ui/src/js/conf/home/store/resource/getters.js diff --git a/escheduler-ui/src/js/conf/home/store/resource/index.js b/dolphinscheduler-ui/src/js/conf/home/store/resource/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/resource/index.js rename to dolphinscheduler-ui/src/js/conf/home/store/resource/index.js diff --git a/escheduler-ui/src/js/conf/home/store/resource/mutations.js b/dolphinscheduler-ui/src/js/conf/home/store/resource/mutations.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/resource/mutations.js rename to dolphinscheduler-ui/src/js/conf/home/store/resource/mutations.js diff --git a/escheduler-ui/src/js/conf/home/store/resource/state.js b/dolphinscheduler-ui/src/js/conf/home/store/resource/state.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/resource/state.js rename to dolphinscheduler-ui/src/js/conf/home/store/resource/state.js diff --git a/escheduler-ui/src/js/conf/home/store/security/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/security/actions.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/security/actions.js rename to dolphinscheduler-ui/src/js/conf/home/store/security/actions.js diff --git a/escheduler-ui/src/js/conf/home/store/security/getters.js b/dolphinscheduler-ui/src/js/conf/home/store/security/getters.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/security/getters.js rename to dolphinscheduler-ui/src/js/conf/home/store/security/getters.js diff --git a/escheduler-ui/src/js/conf/home/store/security/index.js b/dolphinscheduler-ui/src/js/conf/home/store/security/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/security/index.js rename to 
dolphinscheduler-ui/src/js/conf/home/store/security/index.js diff --git a/escheduler-ui/src/js/conf/home/store/security/mutations.js b/dolphinscheduler-ui/src/js/conf/home/store/security/mutations.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/security/mutations.js rename to dolphinscheduler-ui/src/js/conf/home/store/security/mutations.js diff --git a/escheduler-ui/src/js/conf/home/store/security/state.js b/dolphinscheduler-ui/src/js/conf/home/store/security/state.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/security/state.js rename to dolphinscheduler-ui/src/js/conf/home/store/security/state.js diff --git a/escheduler-ui/src/js/conf/home/store/user/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/user/actions.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/user/actions.js rename to dolphinscheduler-ui/src/js/conf/home/store/user/actions.js diff --git a/escheduler-ui/src/js/conf/home/store/user/getters.js b/dolphinscheduler-ui/src/js/conf/home/store/user/getters.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/user/getters.js rename to dolphinscheduler-ui/src/js/conf/home/store/user/getters.js diff --git a/escheduler-ui/src/js/conf/home/store/user/index.js b/dolphinscheduler-ui/src/js/conf/home/store/user/index.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/user/index.js rename to dolphinscheduler-ui/src/js/conf/home/store/user/index.js diff --git a/escheduler-ui/src/js/conf/home/store/user/mutations.js b/dolphinscheduler-ui/src/js/conf/home/store/user/mutations.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/user/mutations.js rename to dolphinscheduler-ui/src/js/conf/home/store/user/mutations.js diff --git a/escheduler-ui/src/js/conf/home/store/user/state.js b/dolphinscheduler-ui/src/js/conf/home/store/user/state.js similarity index 100% rename from escheduler-ui/src/js/conf/home/store/user/state.js 
rename to dolphinscheduler-ui/src/js/conf/home/store/user/state.js diff --git a/escheduler-ui/src/js/conf/login/App.vue b/dolphinscheduler-ui/src/js/conf/login/App.vue similarity index 100% rename from escheduler-ui/src/js/conf/login/App.vue rename to dolphinscheduler-ui/src/js/conf/login/App.vue diff --git a/escheduler-ui/src/js/conf/login/img/login-logo.png b/dolphinscheduler-ui/src/js/conf/login/img/login-logo.png similarity index 100% rename from escheduler-ui/src/js/conf/login/img/login-logo.png rename to dolphinscheduler-ui/src/js/conf/login/img/login-logo.png diff --git a/escheduler-ui/src/js/conf/login/index.js b/dolphinscheduler-ui/src/js/conf/login/index.js similarity index 100% rename from escheduler-ui/src/js/conf/login/index.js rename to dolphinscheduler-ui/src/js/conf/login/index.js diff --git a/escheduler-ui/src/js/module/components/conditions/conditions.vue b/dolphinscheduler-ui/src/js/module/components/conditions/conditions.vue similarity index 100% rename from escheduler-ui/src/js/module/components/conditions/conditions.vue rename to dolphinscheduler-ui/src/js/module/components/conditions/conditions.vue diff --git a/escheduler-ui/src/js/module/components/fileUpdate/definitionUpdate.vue b/dolphinscheduler-ui/src/js/module/components/fileUpdate/definitionUpdate.vue similarity index 100% rename from escheduler-ui/src/js/module/components/fileUpdate/definitionUpdate.vue rename to dolphinscheduler-ui/src/js/module/components/fileUpdate/definitionUpdate.vue diff --git a/escheduler-ui/src/js/module/components/fileUpdate/fileUpdate.vue b/dolphinscheduler-ui/src/js/module/components/fileUpdate/fileUpdate.vue similarity index 100% rename from escheduler-ui/src/js/module/components/fileUpdate/fileUpdate.vue rename to dolphinscheduler-ui/src/js/module/components/fileUpdate/fileUpdate.vue diff --git a/escheduler-ui/src/js/module/components/fileUpdate/udfUpdate.vue b/dolphinscheduler-ui/src/js/module/components/fileUpdate/udfUpdate.vue similarity index 100% 
rename from escheduler-ui/src/js/module/components/fileUpdate/udfUpdate.vue rename to dolphinscheduler-ui/src/js/module/components/fileUpdate/udfUpdate.vue diff --git a/escheduler-ui/src/js/module/components/layout/layout.vue b/dolphinscheduler-ui/src/js/module/components/layout/layout.vue similarity index 100% rename from escheduler-ui/src/js/module/components/layout/layout.vue rename to dolphinscheduler-ui/src/js/module/components/layout/layout.vue diff --git a/escheduler-ui/src/js/module/components/listBoxF/listBoxF.vue b/dolphinscheduler-ui/src/js/module/components/listBoxF/listBoxF.vue similarity index 100% rename from escheduler-ui/src/js/module/components/listBoxF/listBoxF.vue rename to dolphinscheduler-ui/src/js/module/components/listBoxF/listBoxF.vue diff --git a/escheduler-ui/src/js/module/components/listConstruction/listConstruction.vue b/dolphinscheduler-ui/src/js/module/components/listConstruction/listConstruction.vue similarity index 100% rename from escheduler-ui/src/js/module/components/listConstruction/listConstruction.vue rename to dolphinscheduler-ui/src/js/module/components/listConstruction/listConstruction.vue diff --git a/escheduler-ui/src/js/module/components/nav/logo.png b/dolphinscheduler-ui/src/js/module/components/nav/logo.png similarity index 100% rename from escheduler-ui/src/js/module/components/nav/logo.png rename to dolphinscheduler-ui/src/js/module/components/nav/logo.png diff --git a/escheduler-ui/src/js/module/components/nav/m_logo.png b/dolphinscheduler-ui/src/js/module/components/nav/m_logo.png similarity index 100% rename from escheduler-ui/src/js/module/components/nav/m_logo.png rename to dolphinscheduler-ui/src/js/module/components/nav/m_logo.png diff --git a/escheduler-ui/src/js/module/components/nav/nav.vue b/dolphinscheduler-ui/src/js/module/components/nav/nav.vue similarity index 100% rename from escheduler-ui/src/js/module/components/nav/nav.vue rename to dolphinscheduler-ui/src/js/module/components/nav/nav.vue diff 
--git a/escheduler-ui/src/js/module/components/noData/images/errorTip.png b/dolphinscheduler-ui/src/js/module/components/noData/images/errorTip.png similarity index 100% rename from escheduler-ui/src/js/module/components/noData/images/errorTip.png rename to dolphinscheduler-ui/src/js/module/components/noData/images/errorTip.png diff --git a/escheduler-ui/src/js/module/components/noData/noData.vue b/dolphinscheduler-ui/src/js/module/components/noData/noData.vue similarity index 100% rename from escheduler-ui/src/js/module/components/noData/noData.vue rename to dolphinscheduler-ui/src/js/module/components/noData/noData.vue diff --git a/escheduler-ui/src/js/module/components/popup/popup.vue b/dolphinscheduler-ui/src/js/module/components/popup/popup.vue similarity index 100% rename from escheduler-ui/src/js/module/components/popup/popup.vue rename to dolphinscheduler-ui/src/js/module/components/popup/popup.vue diff --git a/escheduler-ui/src/js/module/components/priority/priority.vue b/dolphinscheduler-ui/src/js/module/components/priority/priority.vue similarity index 100% rename from escheduler-ui/src/js/module/components/priority/priority.vue rename to dolphinscheduler-ui/src/js/module/components/priority/priority.vue diff --git a/escheduler-ui/src/js/module/components/progressBar/progressBar.vue b/dolphinscheduler-ui/src/js/module/components/progressBar/progressBar.vue similarity index 100% rename from escheduler-ui/src/js/module/components/progressBar/progressBar.vue rename to dolphinscheduler-ui/src/js/module/components/progressBar/progressBar.vue diff --git a/escheduler-ui/src/js/module/components/secondaryMenu/_source/close.png b/dolphinscheduler-ui/src/js/module/components/secondaryMenu/_source/close.png similarity index 100% rename from escheduler-ui/src/js/module/components/secondaryMenu/_source/close.png rename to dolphinscheduler-ui/src/js/module/components/secondaryMenu/_source/close.png diff --git 
a/escheduler-ui/src/js/module/components/secondaryMenu/_source/menu.js b/dolphinscheduler-ui/src/js/module/components/secondaryMenu/_source/menu.js similarity index 100% rename from escheduler-ui/src/js/module/components/secondaryMenu/_source/menu.js rename to dolphinscheduler-ui/src/js/module/components/secondaryMenu/_source/menu.js diff --git a/escheduler-ui/src/js/module/components/secondaryMenu/_source/open.png b/dolphinscheduler-ui/src/js/module/components/secondaryMenu/_source/open.png similarity index 100% rename from escheduler-ui/src/js/module/components/secondaryMenu/_source/open.png rename to dolphinscheduler-ui/src/js/module/components/secondaryMenu/_source/open.png diff --git a/escheduler-ui/src/js/module/components/secondaryMenu/secondaryMenu.vue b/dolphinscheduler-ui/src/js/module/components/secondaryMenu/secondaryMenu.vue similarity index 100% rename from escheduler-ui/src/js/module/components/secondaryMenu/secondaryMenu.vue rename to dolphinscheduler-ui/src/js/module/components/secondaryMenu/secondaryMenu.vue diff --git a/escheduler-ui/src/js/module/components/spin/spin.vue b/dolphinscheduler-ui/src/js/module/components/spin/spin.vue similarity index 100% rename from escheduler-ui/src/js/module/components/spin/spin.vue rename to dolphinscheduler-ui/src/js/module/components/spin/spin.vue diff --git a/escheduler-ui/src/js/module/components/tooltipsJSON/tooltipsJSON.vue b/dolphinscheduler-ui/src/js/module/components/tooltipsJSON/tooltipsJSON.vue similarity index 100% rename from escheduler-ui/src/js/module/components/tooltipsJSON/tooltipsJSON.vue rename to dolphinscheduler-ui/src/js/module/components/tooltipsJSON/tooltipsJSON.vue diff --git a/escheduler-ui/src/js/module/components/transfer/transfer.vue b/dolphinscheduler-ui/src/js/module/components/transfer/transfer.vue similarity index 100% rename from escheduler-ui/src/js/module/components/transfer/transfer.vue rename to dolphinscheduler-ui/src/js/module/components/transfer/transfer.vue diff --git 
a/escheduler-ui/src/js/module/download/index.js b/dolphinscheduler-ui/src/js/module/download/index.js similarity index 100% rename from escheduler-ui/src/js/module/download/index.js rename to dolphinscheduler-ui/src/js/module/download/index.js diff --git a/escheduler-ui/src/js/module/echarts/themeData.json b/dolphinscheduler-ui/src/js/module/echarts/themeData.json similarity index 100% rename from escheduler-ui/src/js/module/echarts/themeData.json rename to dolphinscheduler-ui/src/js/module/echarts/themeData.json diff --git a/escheduler-ui/src/js/module/filter/filter.js b/dolphinscheduler-ui/src/js/module/filter/filter.js similarity index 100% rename from escheduler-ui/src/js/module/filter/filter.js rename to dolphinscheduler-ui/src/js/module/filter/filter.js diff --git a/escheduler-ui/src/js/module/filter/formatDate.js b/dolphinscheduler-ui/src/js/module/filter/formatDate.js similarity index 100% rename from escheduler-ui/src/js/module/filter/formatDate.js rename to dolphinscheduler-ui/src/js/module/filter/formatDate.js diff --git a/escheduler-ui/src/js/module/i18n/config.js b/dolphinscheduler-ui/src/js/module/i18n/config.js similarity index 100% rename from escheduler-ui/src/js/module/i18n/config.js rename to dolphinscheduler-ui/src/js/module/i18n/config.js diff --git a/escheduler-ui/src/js/module/i18n/index.js b/dolphinscheduler-ui/src/js/module/i18n/index.js similarity index 100% rename from escheduler-ui/src/js/module/i18n/index.js rename to dolphinscheduler-ui/src/js/module/i18n/index.js diff --git a/escheduler-ui/src/js/module/i18n/locale/en_US.js b/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js similarity index 100% rename from escheduler-ui/src/js/module/i18n/locale/en_US.js rename to dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js diff --git a/escheduler-ui/src/js/module/i18n/locale/zh_CN.js b/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js similarity index 100% rename from escheduler-ui/src/js/module/i18n/locale/zh_CN.js rename to 
dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js diff --git a/escheduler-ui/src/js/module/io/index.js b/dolphinscheduler-ui/src/js/module/io/index.js similarity index 100% rename from escheduler-ui/src/js/module/io/index.js rename to dolphinscheduler-ui/src/js/module/io/index.js diff --git a/escheduler-ui/src/js/module/mixin/disabledState.js b/dolphinscheduler-ui/src/js/module/mixin/disabledState.js similarity index 100% rename from escheduler-ui/src/js/module/mixin/disabledState.js rename to dolphinscheduler-ui/src/js/module/mixin/disabledState.js diff --git a/escheduler-ui/src/js/module/mixin/listUrlParamHandle.js b/dolphinscheduler-ui/src/js/module/mixin/listUrlParamHandle.js similarity index 100% rename from escheduler-ui/src/js/module/mixin/listUrlParamHandle.js rename to dolphinscheduler-ui/src/js/module/mixin/listUrlParamHandle.js diff --git a/escheduler-ui/src/js/module/permissions/index.js b/dolphinscheduler-ui/src/js/module/permissions/index.js similarity index 100% rename from escheduler-ui/src/js/module/permissions/index.js rename to dolphinscheduler-ui/src/js/module/permissions/index.js diff --git a/escheduler-ui/src/js/module/util/clickoutside.js b/dolphinscheduler-ui/src/js/module/util/clickoutside.js similarity index 100% rename from escheduler-ui/src/js/module/util/clickoutside.js rename to dolphinscheduler-ui/src/js/module/util/clickoutside.js diff --git a/escheduler-ui/src/js/module/util/cookie.js b/dolphinscheduler-ui/src/js/module/util/cookie.js similarity index 100% rename from escheduler-ui/src/js/module/util/cookie.js rename to dolphinscheduler-ui/src/js/module/util/cookie.js diff --git a/escheduler-ui/src/js/module/util/index.js b/dolphinscheduler-ui/src/js/module/util/index.js similarity index 100% rename from escheduler-ui/src/js/module/util/index.js rename to dolphinscheduler-ui/src/js/module/util/index.js diff --git a/escheduler-ui/src/js/module/util/localStorage.js b/dolphinscheduler-ui/src/js/module/util/localStorage.js 
similarity index 100% rename from escheduler-ui/src/js/module/util/localStorage.js rename to dolphinscheduler-ui/src/js/module/util/localStorage.js diff --git a/escheduler-ui/src/js/module/util/routerUtil.js b/dolphinscheduler-ui/src/js/module/util/routerUtil.js similarity index 100% rename from escheduler-ui/src/js/module/util/routerUtil.js rename to dolphinscheduler-ui/src/js/module/util/routerUtil.js diff --git a/escheduler-ui/src/js/module/util/util.js b/dolphinscheduler-ui/src/js/module/util/util.js similarity index 100% rename from escheduler-ui/src/js/module/util/util.js rename to dolphinscheduler-ui/src/js/module/util/util.js diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/README.md b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/README.md diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/build/config.js b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/build/config.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/build/config.js rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/build/config.js diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/build/webpack.config.prod.js b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/build/webpack.config.prod.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/build/webpack.config.prod.js rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/build/webpack.config.prod.js diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/dist/index.js b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/dist/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/dist/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/dist/index.js diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/dist/index.js.map 
b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/dist/index.js.map similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/dist/index.js.map rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/dist/index.js.map diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/mock/data.js b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/mock/data.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/mock/data.js rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/mock/data.js diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/mock/theme.json b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/mock/theme.json similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/mock/theme.json rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/mock/theme.json diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/packages/bar.vue 
b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/bar.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/packages/bar.vue rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/bar.vue diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/packages/funnel.vue b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/funnel.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/packages/funnel.vue rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/funnel.vue diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/packages/line.vue b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/line.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/packages/line.vue rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/line.vue diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/packages/pie.vue b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/pie.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/packages/pie.vue rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/pie.vue diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/packages/radar.vue b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/radar.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/packages/radar.vue rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/radar.vue diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/packages/scatter.vue b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/scatter.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/packages/scatter.vue rename to 
dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/packages/scatter.vue diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/router/index.js b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/router/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/router/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/router/index.js diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/example/styles/main.scss b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/styles/main.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/example/styles/main.scss rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/example/styles/main.scss diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/package.json b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/package.json similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/package.json rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/package.json diff --git a/escheduler-ui/src/lib/@analysys/ana-charts/postcss.config.js b/dolphinscheduler-ui/src/lib/@analysys/ana-charts/postcss.config.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ana-charts/postcss.config.js rename to dolphinscheduler-ui/src/lib/@analysys/ana-charts/postcss.config.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/lib/ans-ui.min.css b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/ans-ui.min.css similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/lib/ans-ui.min.css rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/ans-ui.min.css diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/lib/ans-ui.min.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/ans-ui.min.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/lib/ans-ui.min.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/ans-ui.min.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.eot b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.eot similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.eot rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.eot diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.svg b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.svg similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.svg rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.svg diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.ttf b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.ttf similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.ttf rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.ttf diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.woff b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.woff similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.woff rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/font/iconfont.woff diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/lib/locale/en.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/locale/en.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/lib/locale/en.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/locale/en.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/lib/locale/en.js.map b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/locale/en.js.map similarity 
index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/lib/locale/en.js.map rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/locale/en.js.map diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/lib/locale/zh-CN.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/locale/zh-CN.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/lib/locale/zh-CN.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/locale/zh-CN.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/lib/locale/zh-CN.js.map b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/locale/zh-CN.js.map similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/lib/locale/zh-CN.js.map rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/lib/locale/zh-CN.js.map diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/package.json b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/package.json similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/package.json rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/package.json diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/index.html similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/Box.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/Box.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/Box.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/Box.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/BoxManager.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/BoxManager.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/BoxManager.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/BoxManager.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/index.js rename to 
dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/base/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/message/message.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/message/message.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/message/message.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/message/message.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/modal/modal.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/modal/modal.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/modal/modal.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/modal/modal.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/notice/notice.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/notice/notice.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/notice/notice.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-box/src/source/layer/notice/notice.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/app.vue similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/source/Button.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/source/Button.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/source/Button.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/source/Button.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/source/ButtonGroup.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/source/ButtonGroup.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/source/ButtonGroup.vue rename 
to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-button/src/source/ButtonGroup.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/data.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/data.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/data.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/data.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/example/index.js diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/source/Cascader.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/source/Cascader.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/source/Cascader.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/source/Cascader.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/source/Caspanel.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/source/Caspanel.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/source/Caspanel.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-cascader/src/source/Caspanel.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/app.vue diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/source/Checkbox.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/source/Checkbox.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/source/Checkbox.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/source/Checkbox.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/source/CheckboxGroup.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/source/CheckboxGroup.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/source/CheckboxGroup.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-checkbox/src/source/CheckboxGroup.vue diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/index.js diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/confirm.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/confirm.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/confirm.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/confirm.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/day.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/day.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/day.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/day.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/time.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/time.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/time.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/time.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/years.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/years.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/years.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/base/years.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/datepicker.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/datepicker.vue similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/datepicker.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/datepicker.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/date.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/date.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/date.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/date.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/daterange.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/daterange.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/daterange.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/daterange.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/month.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/month.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/month.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/month.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/time.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/time.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/time.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/time.vue diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/year.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/year.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/year.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/panel/year.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/date.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/date.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/date.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/date.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/isType.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/isType.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/isType.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/isType.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/isValid.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/isValid.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/isValid.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/isValid.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/ishms.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/ishms.js similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/ishms.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/ishms.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/todate.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/todate.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/todate.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-datepicker/src/source/util/todate.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/index.js similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/test.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/test.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/test.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/example/test.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/src/source/drawer.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/src/source/drawer.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/src/source/drawer.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-drawer/src/source/drawer.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/app.vue diff 
--git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/source/Form.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/source/Form.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/source/Form.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/source/Form.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/source/FormItem.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/source/FormItem.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/source/FormItem.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-form/src/source/FormItem.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/README.md 
b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/source/Input.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/source/Input.vue similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/source/Input.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/source/Input.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/source/util/calcTextareaHeight.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/source/util/calcTextareaHeight.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/source/util/calcTextareaHeight.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-input/src/source/util/calcTextareaHeight.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/index.js similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/src/source/Page.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/src/source/Page.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/src/source/Page.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-pagination/src/source/Page.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/index.html rename to 
dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/source/Poptip.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/source/Poptip.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/source/Poptip.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/source/Poptip.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/source/directive.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/source/directive.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/source/directive.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-poptip/src/source/directive.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/README.md diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/src/source/Progress.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/src/source/Progress.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/src/source/Progress.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-progress/src/source/Progress.vue diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/source/Radio.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/source/Radio.vue similarity index 
100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/source/Radio.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/source/Radio.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/source/RadioGroup.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/source/RadioGroup.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/source/RadioGroup.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-radio/src/source/RadioGroup.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/index.js similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/HorizontalScrollbar.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/HorizontalScrollbar.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/HorizontalScrollbar.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/HorizontalScrollbar.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/Scroller.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/Scroller.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/Scroller.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/Scroller.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/VerticalScrollbar.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/VerticalScrollbar.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/VerticalScrollbar.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-scroller/src/source/VerticalScrollbar.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/README.md 
b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/async.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/async.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/async.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/async.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/dynamic.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/dynamic.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/dynamic.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/dynamic.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/index.js similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/navigation.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/navigation.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/navigation.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/example/navigation.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/Option.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/Option.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/Option.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/Option.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/OptionGroup.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/OptionGroup.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/OptionGroup.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/OptionGroup.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/Select.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/Select.vue similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/Select.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/Select.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/SelectDropdown.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/SelectDropdown.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/SelectDropdown.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-select/src/source/SelectDropdown.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/index.js rename to 
dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/Spin.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/Spin.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/Spin.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/Spin.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/directive.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/directive.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/directive.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/directive.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/service.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/service.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/service.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-spin/src/source/service.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/README.md diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/src/source/Switch.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/src/source/Switch.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/src/source/Switch.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-switch/src/source/Switch.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/README.md 
b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/array.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/array.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/array.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/array.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/dynamic.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/dynamic.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/dynamic.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/dynamic.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/index.js similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/indexs.json b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/indexs.json similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/indexs.json rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/indexs.json diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/paging.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/paging.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/paging.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/paging.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/restrict.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/restrict.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/restrict.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/restrict.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/sort.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/sort.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/sort.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/sort.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/tree.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/tree.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/tree.vue rename to 
dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/example/tree.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/Table.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/Table.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/Table.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/Table.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableBody.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableBody.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableBody.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableBody.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableColumn.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableColumn.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableColumn.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableColumn.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableHeader.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableHeader.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableHeader.vue rename to 
dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableHeader.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableTd.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableTd.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableTd.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableTd.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableTh.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableTh.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableTh.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/TableTh.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/cellRenderer.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/cellRenderer.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/cellRenderer.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/cellRenderer.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/layout.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/layout.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/layout.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/layout.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/layoutObserver.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/layoutObserver.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/layoutObserver.js 
rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/layoutObserver.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/store.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/store.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/store.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-table/src/source/store.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/index.html rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/index.js rename to 
dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/src/source/Timepicker.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/src/source/Timepicker.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/src/source/Timepicker.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-timepicker/src/source/Timepicker.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/README.md b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/README.md similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/README.md rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/README.md diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/app.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/app.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/app.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/index.html b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/index.html rename to 
dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/index.html diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/example/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/Tooltip.vue b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/Tooltip.vue similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/Tooltip.vue rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/Tooltip.vue diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/directive.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/directive.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/directive.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/directive.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/factory.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/factory.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/factory.js rename to 
dolphinscheduler-ui/src/lib/@analysys/ans-ui/packages/vue-tooltip/src/source/factory.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/locale/format.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/locale/format.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/locale/format.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/locale/format.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/locale/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/locale/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/locale/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/locale/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/locale/lang/en.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/locale/lang/en.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/locale/lang/en.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/locale/lang/en.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/locale/lang/zh-CN.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/locale/lang/zh-CN.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/locale/lang/zh-CN.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/locale/lang/zh-CN.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/attentionSeekers.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/attentionSeekers.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/attentionSeekers.scss rename to 
dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/attentionSeekers.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/bouncingEntrances.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/bouncingEntrances.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/bouncingEntrances.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/bouncingEntrances.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/bouncingExits.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/bouncingExits.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/bouncingExits.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/bouncingExits.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/fadingEntrances.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/fadingEntrances.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/fadingEntrances.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/fadingEntrances.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/fadingExits.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/fadingExits.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/fadingExits.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/fadingExits.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/flippers.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/flippers.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/flippers.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/flippers.scss 
diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/index.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/index.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/index.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/index.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/lightspeed.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/lightspeed.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/lightspeed.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/lightspeed.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/rotatingEntrances.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/rotatingEntrances.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/rotatingEntrances.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/rotatingEntrances.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/rotatingExits.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/rotatingExits.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/rotatingExits.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/rotatingExits.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/slidingEntrances.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/slidingEntrances.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/slidingEntrances.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/slidingEntrances.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/slidingExits.scss 
b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/slidingExits.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/slidingExits.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/slidingExits.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/specials.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/specials.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/specials.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/specials.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/zoomEntrances.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/zoomEntrances.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/zoomEntrances.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/zoomEntrances.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/zoomExits.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/zoomExits.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/zoomExits.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/animation/zoomExits.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/common.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/common.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/common.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/common.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/box.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/box.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/box.scss rename 
to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/box.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/message.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/message.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/message.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/message.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/modal.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/modal.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/modal.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/modal.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/notice.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/notice.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/notice.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/box/notice.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/button/button.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/button/button.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/button/button.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/button/button.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/button/mixin.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/button/mixin.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/button/mixin.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/button/mixin.scss diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/cascader/cascader.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/cascader/cascader.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/cascader/cascader.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/cascader/cascader.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/checkbox/checkbox.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/checkbox/checkbox.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/checkbox/checkbox.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/checkbox/checkbox.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/datepicker/datepicker.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/datepicker/datepicker.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/datepicker/datepicker.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/datepicker/datepicker.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/drawer/drawer.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/drawer/drawer.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/drawer/drawer.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/drawer/drawer.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/form/form.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/form/form.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/form/form.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/form/form.scss diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/index.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/index.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/index.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/index.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/input/input.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/input/input.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/input/input.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/input/input.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/pagination/pagination.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/pagination/pagination.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/pagination/pagination.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/pagination/pagination.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/poptip/poptip.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/poptip/poptip.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/poptip/poptip.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/poptip/poptip.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/progress/progress.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/progress/progress.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/progress/progress.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/progress/progress.scss diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/radio/radio.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/radio/radio.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/radio/radio.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/radio/radio.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/scroller/scroller.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/scroller/scroller.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/scroller/scroller.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/scroller/scroller.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/select/select.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/select/select.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/select/select.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/select/select.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/spin/spin.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/spin/spin.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/spin/spin.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/spin/spin.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/switch/switch.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/switch/switch.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/switch/switch.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/switch/switch.scss diff --git 
a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/table/table.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/table/table.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/table/table.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/table/table.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/tooltip/tooltip.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/tooltip/tooltip.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/components/tooltip/tooltip.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/components/tooltip/tooltip.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/font.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/font.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/font.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/font.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.eot b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.eot similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.eot rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.eot diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.svg b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.svg similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.svg rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.svg diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.ttf b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.ttf similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.ttf rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.ttf diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.woff b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.woff similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.woff rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/font/iconfont.woff diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/index.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/index.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/index.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/index.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/style/vars.scss b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/vars.scss similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/style/vars.scss rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/style/vars.scss diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/assist.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/assist.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/assist.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/assist.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/constants.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/constants.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/constants.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/constants.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/customRenderer.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/customRenderer.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/customRenderer.js rename to 
dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/customRenderer.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/clickOutside.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/clickOutside.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/clickOutside.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/clickOutside.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/mousewheel.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/mousewheel.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/mousewheel.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/directives/mousewheel.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/animatedScroll.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/animatedScroll.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/animatedScroll.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/animatedScroll.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/class.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/class.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/class.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/class.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/index.js similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/limitedLoop.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/limitedLoop.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/limitedLoop.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/limitedLoop.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/scrollIntoView.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/scrollIntoView.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/scrollIntoView.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/scrollIntoView.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/scrollbarWidth.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/scrollbarWidth.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/scrollbarWidth.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/scrollbarWidth.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/style.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/style.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/style.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/dom/style.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/event.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/event.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/event.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/event.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/index.js similarity index 100% rename from 
escheduler-ui/src/lib/@analysys/ans-ui/src/util/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/lang.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/lang.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/lang.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/lang.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/emitter.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/emitter.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/emitter.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/emitter.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/index.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/index.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/index.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/index.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/locale.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/locale.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/locale.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/locale.js diff --git a/escheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/popper.js b/dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/popper.js similarity index 100% rename from escheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/popper.js rename to dolphinscheduler-ui/src/lib/@analysys/ans-ui/src/util/mixins/popper.js diff --git a/escheduler-ui/src/lib/@fedor/io/README.md b/dolphinscheduler-ui/src/lib/@fedor/io/README.md similarity index 100% rename from escheduler-ui/src/lib/@fedor/io/README.md rename to dolphinscheduler-ui/src/lib/@fedor/io/README.md diff --git 
a/escheduler-ui/src/lib/@fedor/io/dist/io.esm.js b/dolphinscheduler-ui/src/lib/@fedor/io/dist/io.esm.js similarity index 100% rename from escheduler-ui/src/lib/@fedor/io/dist/io.esm.js rename to dolphinscheduler-ui/src/lib/@fedor/io/dist/io.esm.js diff --git a/escheduler-ui/src/lib/@fedor/io/dist/io.js b/dolphinscheduler-ui/src/lib/@fedor/io/dist/io.js similarity index 100% rename from escheduler-ui/src/lib/@fedor/io/dist/io.js rename to dolphinscheduler-ui/src/lib/@fedor/io/dist/io.js diff --git a/escheduler-ui/src/lib/@fedor/io/dist/io.min.js b/dolphinscheduler-ui/src/lib/@fedor/io/dist/io.min.js similarity index 100% rename from escheduler-ui/src/lib/@fedor/io/dist/io.min.js rename to dolphinscheduler-ui/src/lib/@fedor/io/dist/io.min.js diff --git a/escheduler-ui/src/lib/@fedor/io/package.json b/dolphinscheduler-ui/src/lib/@fedor/io/package.json similarity index 100% rename from escheduler-ui/src/lib/@fedor/io/package.json rename to dolphinscheduler-ui/src/lib/@fedor/io/package.json diff --git a/escheduler-ui/src/lib/@fedor/progress-webpack-plugin/lib/index.js b/dolphinscheduler-ui/src/lib/@fedor/progress-webpack-plugin/lib/index.js similarity index 100% rename from escheduler-ui/src/lib/@fedor/progress-webpack-plugin/lib/index.js rename to dolphinscheduler-ui/src/lib/@fedor/progress-webpack-plugin/lib/index.js diff --git a/escheduler-ui/src/lib/@fedor/progress-webpack-plugin/package.json b/dolphinscheduler-ui/src/lib/@fedor/progress-webpack-plugin/package.json similarity index 100% rename from escheduler-ui/src/lib/@fedor/progress-webpack-plugin/package.json rename to dolphinscheduler-ui/src/lib/@fedor/progress-webpack-plugin/package.json diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/.circleci/config.yml b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/.circleci/config.yml similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/.circleci/config.yml rename to 
dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/.circleci/config.yml diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/.prettierrc.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/.prettierrc.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/.prettierrc.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/.prettierrc.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/CHANGELOG.md b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/CHANGELOG.md similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/CHANGELOG.md rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/CHANGELOG.md diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/README.md b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/README.md similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/README.md rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/README.md diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileStyle.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileStyle.d.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileStyle.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileStyle.d.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileStyle.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileStyle.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileStyle.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileStyle.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileTemplate.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileTemplate.d.ts similarity index 
100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileTemplate.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileTemplate.d.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileTemplate.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileTemplate.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileTemplate.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/compileTemplate.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/index.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/index.d.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/index.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/index.d.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/index.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/index.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/index.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/index.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/parse.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/parse.d.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/parse.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/parse.d.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/parse.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/parse.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/parse.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/parse.js diff --git 
a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/scoped.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/scoped.d.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/scoped.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/scoped.d.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/scoped.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/scoped.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/scoped.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/scoped.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/trim.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/trim.d.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/trim.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/trim.d.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/trim.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/trim.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/trim.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/stylePlugins/trim.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/styleProcessors/index.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/styleProcessors/index.d.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/styleProcessors/index.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/styleProcessors/index.d.ts diff --git 
a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/styleProcessors/index.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/styleProcessors/index.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/styleProcessors/index.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/styleProcessors/index.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/assetUrl.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/assetUrl.d.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/assetUrl.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/assetUrl.d.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/assetUrl.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/assetUrl.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/assetUrl.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/assetUrl.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/srcset.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/srcset.d.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/srcset.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/srcset.d.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/srcset.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/srcset.js similarity index 100% rename from 
escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/srcset.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/srcset.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/utils.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/utils.d.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/utils.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/utils.d.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/utils.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/utils.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/utils.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/templateCompilerModules/utils.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/types.d.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/types.d.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/types.d.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/types.d.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/dist/types.js b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/types.js similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/dist/types.js rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/dist/types.js diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/compileStyle.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/compileStyle.ts similarity index 100% rename from 
escheduler-ui/src/lib/@vue/component-compiler-utils/lib/compileStyle.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/compileStyle.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/compileTemplate.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/compileTemplate.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/lib/compileTemplate.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/compileTemplate.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/index.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/index.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/lib/index.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/index.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/parse.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/parse.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/lib/parse.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/parse.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/stylePlugins/scoped.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/stylePlugins/scoped.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/lib/stylePlugins/scoped.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/stylePlugins/scoped.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/stylePlugins/trim.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/stylePlugins/trim.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/lib/stylePlugins/trim.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/stylePlugins/trim.ts diff --git 
a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/styleProcessors/index.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/styleProcessors/index.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/lib/styleProcessors/index.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/styleProcessors/index.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/assetUrl.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/assetUrl.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/assetUrl.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/assetUrl.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/srcset.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/srcset.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/srcset.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/srcset.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/utils.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/utils.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/utils.ts rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/templateCompilerModules/utils.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/lib/types.ts b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/types.ts similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/lib/types.ts rename to 
dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/lib/types.ts diff --git a/escheduler-ui/src/lib/@vue/component-compiler-utils/package.json b/dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/package.json similarity index 100% rename from escheduler-ui/src/lib/@vue/component-compiler-utils/package.json rename to dolphinscheduler-ui/src/lib/@vue/component-compiler-utils/package.json diff --git a/escheduler-ui/src/lib/@vue/crontab/README.md b/dolphinscheduler-ui/src/lib/@vue/crontab/README.md similarity index 100% rename from escheduler-ui/src/lib/@vue/crontab/README.md rename to dolphinscheduler-ui/src/lib/@vue/crontab/README.md diff --git a/escheduler-ui/src/lib/@vue/crontab/build/config.js b/dolphinscheduler-ui/src/lib/@vue/crontab/build/config.js similarity index 100% rename from escheduler-ui/src/lib/@vue/crontab/build/config.js rename to dolphinscheduler-ui/src/lib/@vue/crontab/build/config.js diff --git a/escheduler-ui/src/lib/@vue/crontab/build/webpack.config.prod.js b/dolphinscheduler-ui/src/lib/@vue/crontab/build/webpack.config.prod.js similarity index 100% rename from escheduler-ui/src/lib/@vue/crontab/build/webpack.config.prod.js rename to dolphinscheduler-ui/src/lib/@vue/crontab/build/webpack.config.prod.js diff --git a/escheduler-ui/src/lib/@vue/crontab/dist/index.css b/dolphinscheduler-ui/src/lib/@vue/crontab/dist/index.css similarity index 100% rename from escheduler-ui/src/lib/@vue/crontab/dist/index.css rename to dolphinscheduler-ui/src/lib/@vue/crontab/dist/index.css diff --git a/escheduler-ui/src/lib/@vue/crontab/dist/index.js b/dolphinscheduler-ui/src/lib/@vue/crontab/dist/index.js similarity index 100% rename from escheduler-ui/src/lib/@vue/crontab/dist/index.js rename to dolphinscheduler-ui/src/lib/@vue/crontab/dist/index.js diff --git a/escheduler-ui/src/lib/@vue/crontab/dist/index.js.map b/dolphinscheduler-ui/src/lib/@vue/crontab/dist/index.js.map similarity index 100% rename from 
escheduler-ui/src/lib/@vue/crontab/dist/index.js.map rename to dolphinscheduler-ui/src/lib/@vue/crontab/dist/index.js.map diff --git a/escheduler-ui/src/lib/@vue/crontab/example/app.vue b/dolphinscheduler-ui/src/lib/@vue/crontab/example/app.vue similarity index 100% rename from escheduler-ui/src/lib/@vue/crontab/example/app.vue rename to dolphinscheduler-ui/src/lib/@vue/crontab/example/app.vue diff --git a/escheduler-ui/src/lib/@vue/crontab/example/index.html b/dolphinscheduler-ui/src/lib/@vue/crontab/example/index.html similarity index 100% rename from escheduler-ui/src/lib/@vue/crontab/example/index.html rename to dolphinscheduler-ui/src/lib/@vue/crontab/example/index.html diff --git a/escheduler-ui/src/lib/@vue/crontab/example/index.js b/dolphinscheduler-ui/src/lib/@vue/crontab/example/index.js similarity index 100% rename from escheduler-ui/src/lib/@vue/crontab/example/index.js rename to dolphinscheduler-ui/src/lib/@vue/crontab/example/index.js diff --git a/escheduler-ui/src/lib/@vue/crontab/package.json b/dolphinscheduler-ui/src/lib/@vue/crontab/package.json similarity index 100% rename from escheduler-ui/src/lib/@vue/crontab/package.json rename to dolphinscheduler-ui/src/lib/@vue/crontab/package.json diff --git a/escheduler-ui/src/lib/@vue/crontab/postcss.config.js b/dolphinscheduler-ui/src/lib/@vue/crontab/postcss.config.js similarity index 100% rename from escheduler-ui/src/lib/@vue/crontab/postcss.config.js rename to dolphinscheduler-ui/src/lib/@vue/crontab/postcss.config.js diff --git a/escheduler-ui/src/lib/external/config.js b/dolphinscheduler-ui/src/lib/external/config.js similarity index 100% rename from escheduler-ui/src/lib/external/config.js rename to dolphinscheduler-ui/src/lib/external/config.js diff --git a/escheduler-ui/src/lib/external/email.js b/dolphinscheduler-ui/src/lib/external/email.js similarity index 100% rename from escheduler-ui/src/lib/external/email.js rename to dolphinscheduler-ui/src/lib/external/email.js diff --git 
a/escheduler-ui/src/sass/common/_animation.scss b/dolphinscheduler-ui/src/sass/common/_animation.scss similarity index 100% rename from escheduler-ui/src/sass/common/_animation.scss rename to dolphinscheduler-ui/src/sass/common/_animation.scss diff --git a/escheduler-ui/src/sass/common/_font.scss b/dolphinscheduler-ui/src/sass/common/_font.scss similarity index 100% rename from escheduler-ui/src/sass/common/_font.scss rename to dolphinscheduler-ui/src/sass/common/_font.scss diff --git a/escheduler-ui/src/sass/common/_mixin.scss b/dolphinscheduler-ui/src/sass/common/_mixin.scss similarity index 100% rename from escheduler-ui/src/sass/common/_mixin.scss rename to dolphinscheduler-ui/src/sass/common/_mixin.scss diff --git a/escheduler-ui/src/sass/common/_normalize.scss b/dolphinscheduler-ui/src/sass/common/_normalize.scss similarity index 100% rename from escheduler-ui/src/sass/common/_normalize.scss rename to dolphinscheduler-ui/src/sass/common/_normalize.scss diff --git a/escheduler-ui/src/sass/common/_scrollbar.scss b/dolphinscheduler-ui/src/sass/common/_scrollbar.scss similarity index 100% rename from escheduler-ui/src/sass/common/_scrollbar.scss rename to dolphinscheduler-ui/src/sass/common/_scrollbar.scss diff --git a/escheduler-ui/src/sass/common/_table.scss b/dolphinscheduler-ui/src/sass/common/_table.scss similarity index 100% rename from escheduler-ui/src/sass/common/_table.scss rename to dolphinscheduler-ui/src/sass/common/_table.scss diff --git a/escheduler-ui/src/sass/common/index.scss b/dolphinscheduler-ui/src/sass/common/index.scss similarity index 100% rename from escheduler-ui/src/sass/common/index.scss rename to dolphinscheduler-ui/src/sass/common/index.scss diff --git a/escheduler-ui/src/sass/conf/home/index.scss b/dolphinscheduler-ui/src/sass/conf/home/index.scss similarity index 100% rename from escheduler-ui/src/sass/conf/home/index.scss rename to dolphinscheduler-ui/src/sass/conf/home/index.scss diff --git 
a/escheduler-ui/src/sass/conf/login/index.scss b/dolphinscheduler-ui/src/sass/conf/login/index.scss similarity index 100% rename from escheduler-ui/src/sass/conf/login/index.scss rename to dolphinscheduler-ui/src/sass/conf/login/index.scss diff --git a/escheduler-ui/src/view/common/meta.inc b/dolphinscheduler-ui/src/view/common/meta.inc similarity index 100% rename from escheduler-ui/src/view/common/meta.inc rename to dolphinscheduler-ui/src/view/common/meta.inc diff --git a/escheduler-ui/src/view/common/outro.inc b/dolphinscheduler-ui/src/view/common/outro.inc similarity index 100% rename from escheduler-ui/src/view/common/outro.inc rename to dolphinscheduler-ui/src/view/common/outro.inc diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/gitbook/gitbook-plugin-livereload/plugin.js b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/gitbook/gitbook-plugin-livereload/plugin.js similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/gitbook/gitbook-plugin-livereload/plugin.js rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/gitbook/gitbook-plugin-livereload/plugin.js diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/gitbook/gitbook.js b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/gitbook/gitbook.js similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/gitbook/gitbook.js rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/gitbook/gitbook.js diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/gitbook/theme.js b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/gitbook/theme.js similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/gitbook/theme.js rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/gitbook/theme.js diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/addtenant.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/addtenant.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/addtenant.png rename to 
dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/addtenant.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/auth_project.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/auth_project.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/auth_project.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/auth_project.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/auth_user.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/auth_user.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/auth_user.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/auth_user.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/complement_data.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/complement_data.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/complement_data.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/complement_data.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/definition_create.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/definition_create.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/definition_create.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/definition_create.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/definition_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/definition_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/definition_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/definition_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/definition_list.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/definition_list.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/definition_list.png rename to 
dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/definition_list.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_1.jpg b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_1.jpg similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_1.jpg rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_1.jpg diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_2.jpg b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_2.jpg similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_2.jpg rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_2.jpg diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit2.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit2.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit2.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit2.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit3.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit3.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit3.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit3.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit4.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit4.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit4.png rename to 
dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/dependent_edit4.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/file_create.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/file_create.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/file_create.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/file_create.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/file_detail.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/file_detail.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/file_detail.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/file_detail.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/file_rename.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/file_rename.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/file_rename.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/file_rename.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/file_upload.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/file_upload.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/file_upload.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/file_upload.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/flink_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/flink_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/flink_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/flink_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/gantt.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/gantt.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/gantt.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/gantt.png diff --git 
a/escheduler-ui/src/view/docs/zh_CN/_book/images/global_parameter.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/global_parameter.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/global_parameter.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/global_parameter.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/hive_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/hive_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/hive_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/hive_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/hive_edit2.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/hive_edit2.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/hive_edit2.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/hive_edit2.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/local_parameter.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/local_parameter.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/local_parameter.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/local_parameter.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/login.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/login.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/login.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/login.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/logout.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/logout.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/logout.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/logout.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/mail_edit.png 
b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/mail_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/mail_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/mail_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/master.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/master.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/master.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/master.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/mr_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/mr_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/mr_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/mr_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/mr_java.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/mr_java.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/mr_java.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/mr_java.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/mysql_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/mysql_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/mysql_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/mysql_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/postgresql_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/postgresql_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/postgresql_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/postgresql_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/procedure_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/procedure_edit.png similarity index 100% rename 
from escheduler-ui/src/view/docs/zh_CN/_book/images/procedure_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/procedure_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/process_instance.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/process_instance.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/process_instance.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/process_instance.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/process_instance_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/process_instance_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/process_instance_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/process_instance_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/project_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/project_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/project_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/project_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/project_index.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/project_index.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/project_index.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/project_index.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/python_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/python_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/python_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/python_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/scheduler.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/scheduler.png similarity index 
100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/scheduler.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/scheduler.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/scheduler2.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/scheduler2.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/scheduler2.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/scheduler2.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/shell_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/shell_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/shell_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/shell_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/spark_datesource.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/spark_datesource.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/spark_datesource.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/spark_datesource.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/spark_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/spark_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/spark_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/spark_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/sql_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/sql_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/sql_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/sql_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/start_from_current.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/start_from_current.png similarity index 100% rename from 
escheduler-ui/src/view/docs/zh_CN/_book/images/start_from_current.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/start_from_current.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/start_from_current2.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/start_from_current2.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/start_from_current2.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/start_from_current2.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/start_process.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/start_process.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/start_process.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/start_process.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/subprocess_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/subprocess_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/subprocess_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/subprocess_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/task_history.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/task_history.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/task_history.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/task_history.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/task_list.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/task_list.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/task_list.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/task_list.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/task_log.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/task_log.png similarity index 100% rename from 
escheduler-ui/src/view/docs/zh_CN/_book/images/task_log.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/task_log.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/task_log2.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/task_log2.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/task_log2.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/task_log2.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_DEPENDENT.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_DEPENDENT.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_DEPENDENT.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_DEPENDENT.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_MR.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_MR.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_MR.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_MR.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_PROCEDURE.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_PROCEDURE.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_PROCEDURE.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_PROCEDURE.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_PYTHON.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_PYTHON.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_PYTHON.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_PYTHON.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SHELL.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SHELL.png similarity index 100% rename from 
escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SHELL.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SHELL.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SPARK.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SPARK.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SPARK.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SPARK.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SQL.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SQL.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SQL.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SQL.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SUB_PROCESS.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SUB_PROCESS.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SUB_PROCESS.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/toolbar_SUB_PROCESS.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/tree_view.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/tree_view.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/tree_view.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/tree_view.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/udf_edit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/udf_edit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/udf_edit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/udf_edit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/user_manager.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/user_manager.png similarity index 100% rename from 
escheduler-ui/src/view/docs/zh_CN/_book/images/user_manager.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/user_manager.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/useredit.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/useredit.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/useredit.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/useredit.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/useredit2.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/useredit2.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/useredit2.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/useredit2.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/userinfo.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/userinfo.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/userinfo.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/userinfo.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/variable_view.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/variable_view.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/variable_view.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/variable_view.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/variable_view2.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/variable_view2.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/variable_view2.png rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/variable_view2.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/images/worker.png b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/worker.png similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/images/worker.png rename to 
dolphinscheduler-ui/src/view/docs/zh_CN/_book/images/worker.png diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/index.html b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/index.html similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/index.html rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/index.html diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/pages/deploy-background.html b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/pages/deploy-background.html similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/pages/deploy-background.html rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/pages/deploy-background.html diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/pages/deploy-foreground.html b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/pages/deploy-foreground.html similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/pages/deploy-foreground.html rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/pages/deploy-foreground.html diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/pages/development.html b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/pages/development.html similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/pages/development.html rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/pages/development.html diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/pages/guide-architecture.html b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/pages/guide-architecture.html similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/pages/guide-architecture.html rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/pages/guide-architecture.html diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/pages/guide-manual.html b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/pages/guide-manual.html similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/pages/guide-manual.html rename to 
dolphinscheduler-ui/src/view/docs/zh_CN/_book/pages/guide-manual.html diff --git a/escheduler-ui/src/view/docs/zh_CN/_book/search_index.json b/dolphinscheduler-ui/src/view/docs/zh_CN/_book/search_index.json similarity index 100% rename from escheduler-ui/src/view/docs/zh_CN/_book/search_index.json rename to dolphinscheduler-ui/src/view/docs/zh_CN/_book/search_index.json diff --git a/escheduler-ui/src/view/home/index.html b/dolphinscheduler-ui/src/view/home/index.html similarity index 100% rename from escheduler-ui/src/view/home/index.html rename to dolphinscheduler-ui/src/view/home/index.html diff --git a/escheduler-ui/src/view/login/index.html b/dolphinscheduler-ui/src/view/login/index.html similarity index 100% rename from escheduler-ui/src/view/login/index.html rename to dolphinscheduler-ui/src/view/login/index.html diff --git a/escheduler-alert/pom.xml b/escheduler-alert/pom.xml deleted file mode 100644 index 630c519fde..0000000000 --- a/escheduler-alert/pom.xml +++ /dev/null @@ -1,129 +0,0 @@ - - - 4.0.0 - - cn.analysys - escheduler - 1.1.0-SNAPSHOT - - escheduler-alert - jar - - - UTF-8 - - - - junit - junit - test - - - - org.apache.commons - commons-email - - - - org.freemarker - freemarker - - - - com.alibaba - fastjson - - - - com.fasterxml.jackson.core - jackson-core - - - - com.fasterxml.jackson.core - jackson-databind - - - - org.slf4j - slf4j-api - - - - org.apache.commons - commons-collections4 - - - - commons-logging - commons-logging - - - - org.apache.commons - commons-lang3 - - - - com.google.guava - guava - - - - ch.qos.logback - logback-classic - - - - commons-io - commons-io - - - - - - org.apache.poi - poi - - - - cn.analysys - escheduler-dao - - - - - - - - maven-assembly-plugin - 2.6 - - - src/main/assembly/package.xml - - false - - - - make-assembly - package - - single - - - - - - org.apache.maven.plugins - maven-compiler-plugin - - ${java.version} - ${java.version} - ${project.build.sourceEncoding} - - - - - - diff --git 
a/escheduler-alert/src/main/assembly/package.xml b/escheduler-alert/src/main/assembly/package.xml deleted file mode 100644 index 68a30262fa..0000000000 --- a/escheduler-alert/src/main/assembly/package.xml +++ /dev/null @@ -1,40 +0,0 @@ - - cluster - - dir - - false - - - src/main/resources - - **/*.properties - **/*.xml - **/*.json - **/*.ftl - - conf - - - target/ - - escheduler-alert-${project.version}.jar - - lib - - - - - lib - true - - javax.servlet:servlet-api - org.eclipse.jetty.aggregate:jetty-all - org.slf4j:slf4j-log4j12 - - - - \ No newline at end of file diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/AlertServer.java b/escheduler-alert/src/main/java/cn/escheduler/alert/AlertServer.java deleted file mode 100644 index fbffe4453f..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/AlertServer.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.alert; - -import cn.escheduler.alert.runner.AlertSender; -import cn.escheduler.alert.utils.Constants; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.dao.AlertDao; -import cn.escheduler.dao.entity.Alert; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.CommandLineRunner; -import org.springframework.boot.SpringApplication; -import org.springframework.context.annotation.ComponentScan; - -import java.util.List; - -/** - * alert of start - */ -@ComponentScan("cn.escheduler") -public class AlertServer implements CommandLineRunner { - private static final Logger logger = LoggerFactory.getLogger(AlertServer.class); - /** - * Alert Dao - */ - @Autowired - private AlertDao alertDao; - - private AlertSender alertSender; - - private static volatile AlertServer instance; - - public AlertServer() { - - } - - public static AlertServer getInstance(){ - if (null == instance) { - synchronized (AlertServer.class) { - if(null == instance) { - instance = new AlertServer(); - } - } - } - return instance; - } - - public void start(){ - logger.info("Alert Server ready start!"); - while (Stopper.isRunning()){ - try { - Thread.sleep(Constants.ALERT_SCAN_INTERVEL); - } catch (InterruptedException e) { - logger.error(e.getMessage(),e); - } - List alerts = alertDao.listWaitExecutionAlert(); - alertSender = new AlertSender(alerts, alertDao); - alertSender.run(); - } - } - - - public static void main(String[] args){ - SpringApplication app = new SpringApplication(AlertServer.class); - app.run(args); - } - - @Override - public void run(String... 
strings) throws Exception { - AlertServer alertServer = AlertServer.getInstance(); - alertServer.start(); - } -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/manager/EmailManager.java b/escheduler-alert/src/main/java/cn/escheduler/alert/manager/EmailManager.java deleted file mode 100644 index 0c50118526..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/manager/EmailManager.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.alert.manager; - -import cn.escheduler.alert.utils.MailUtils; -import cn.escheduler.common.enums.ShowType; - -import java.util.List; -import java.util.Map; - -/** - * email send manager - */ -public class EmailManager { - /** - * email send - * @param receviersList - * @param receviersCcList - * @param title - * @param content - * @param showType - * @return - */ - public Map send(List receviersList,List receviersCcList,String title,String content,ShowType showType){ - - return MailUtils.sendMails(receviersList,receviersCcList,title, content, showType); - } - - /** - * msg send - * @param receviersList - * @param title - * @param content - * @param showType - * @return - */ - public Map send(List receviersList,String title,String content,ShowType showType){ - - return MailUtils.sendMails(receviersList,title, content, showType); - } -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/manager/EnterpriseWeChatManager.java b/escheduler-alert/src/main/java/cn/escheduler/alert/manager/EnterpriseWeChatManager.java deleted file mode 100644 index 426861925c..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/manager/EnterpriseWeChatManager.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.alert.manager; - -import cn.escheduler.alert.utils.Constants; -import cn.escheduler.alert.utils.EnterpriseWeChatUtils; -import cn.escheduler.dao.entity.Alert; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Enterprise WeChat Manager - */ -public class EnterpriseWeChatManager { - private static final Logger logger = LoggerFactory.getLogger(MsgManager.class); - /** - * Enterprise We Chat send - * @param alert - */ - public Map send(Alert alert, String token){ - Map retMap = new HashMap<>(); - retMap.put(Constants.STATUS, false); - String agentId = EnterpriseWeChatUtils.enterpriseWeChatAgentId; - String users = EnterpriseWeChatUtils.enterpriseWeChatUsers; - List userList = Arrays.asList(users.split(",")); - logger.info("send message {}",alert); - String msg = EnterpriseWeChatUtils.makeUserSendMsg(userList, agentId,EnterpriseWeChatUtils.markdownByAlert(alert)); - try { - EnterpriseWeChatUtils.sendEnterpriseWeChat(Constants.UTF_8, msg, token); - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - retMap.put(Constants.STATUS, true); - return retMap; - } - -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/manager/MsgManager.java b/escheduler-alert/src/main/java/cn/escheduler/alert/manager/MsgManager.java deleted file mode 100644 index b2f8172991..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/manager/MsgManager.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.alert.manager; - -import cn.escheduler.dao.entity.Alert; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * SMS send manager - */ -public class MsgManager { - - private static final Logger logger = LoggerFactory.getLogger(MsgManager.class); - /** - * SMS send - * @param alert - */ - public void send(Alert alert){ - logger.info("send message {}",alert); - } -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/runner/AlertSender.java b/escheduler-alert/src/main/java/cn/escheduler/alert/runner/AlertSender.java deleted file mode 100644 index 4ecdd2a51a..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/runner/AlertSender.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.alert.runner; - -import cn.escheduler.alert.manager.EmailManager; -import cn.escheduler.alert.manager.EnterpriseWeChatManager; -import cn.escheduler.alert.utils.Constants; -import cn.escheduler.alert.utils.EnterpriseWeChatUtils; -import cn.escheduler.common.enums.AlertStatus; -import cn.escheduler.common.enums.AlertType; -import cn.escheduler.dao.AlertDao; -import cn.escheduler.dao.entity.Alert; -import cn.escheduler.dao.entity.User; -import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - * alert sender - */ -public class AlertSender{ - - private static final Logger logger = LoggerFactory.getLogger(AlertSender.class); - - private static final EmailManager emailManager= new EmailManager(); - private static final EnterpriseWeChatManager weChatManager= new EnterpriseWeChatManager(); - - - private List alertList; - private AlertDao alertDao; - - public AlertSender(){} - public AlertSender(List alertList, AlertDao alertDao){ - super(); - this.alertList = alertList; - this.alertDao = alertDao; - } - - public void run() { - - List users; - - Map retMaps = null; - for(Alert alert:alertList){ - users = alertDao.listUserByAlertgroupId(alert.getAlertGroupId()); - - - - // receiving group list - List receviersList = new ArrayList(); - for(User user:users){ - receviersList.add(user.getEmail()); - } - // custom receiver - String receivers = 
alert.getReceivers(); - if (StringUtils.isNotEmpty(receivers)){ - String[] splits = receivers.split(","); - for (String receiver : splits){ - receviersList.add(receiver); - } - } - - // copy list - List receviersCcList = new ArrayList(); - - - // Custom Copier - String receiversCc = alert.getReceiversCc(); - - if (StringUtils.isNotEmpty(receiversCc)){ - String[] splits = receiversCc.split(","); - for (String receiverCc : splits){ - receviersCcList.add(receiverCc); - } - } - - if (CollectionUtils.isEmpty(receviersList) && CollectionUtils.isEmpty(receviersCcList)) { - logger.warn("alert send error : At least one receiver address required"); - alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, "execution failure,At least one receiver address required.", alert.getId()); - continue; - } - - if (alert.getAlertType() == AlertType.EMAIL){ - retMaps = emailManager.send(receviersList,receviersCcList, alert.getTitle(), alert.getContent(),alert.getShowType()); - - alert.setInfo(retMaps); - }else if (alert.getAlertType() == AlertType.SMS){ - retMaps = emailManager.send(getReciversForSMS(users), alert.getTitle(), alert.getContent(),alert.getShowType()); - alert.setInfo(retMaps); - } - - boolean flag = Boolean.parseBoolean(String.valueOf(retMaps.get(Constants.STATUS))); - if (flag){ - alertDao.updateAlert(AlertStatus.EXECUTION_SUCCESS, "execution success", alert.getId()); - logger.info("alert send success"); - try { - String token = EnterpriseWeChatUtils.getToken(); - weChatManager.send(alert,token); - } catch (Exception e) { - logger.error(e.getMessage(),e); - } - }else { - alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE,String.valueOf(retMaps.get(Constants.MESSAGE)),alert.getId()); - logger.info("alert send error : {}" , String.valueOf(retMaps.get(Constants.MESSAGE))); - } - } - - } - - - /** - * get a list of SMS users - * @param users - * @return - */ - private List getReciversForSMS(List users){ - List list = new ArrayList<>(); - for (User user : users){ - 
list.add(user.getPhone()); - } - return list; - } -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/Constants.java b/escheduler-alert/src/main/java/cn/escheduler/alert/utils/Constants.java deleted file mode 100644 index 9e551822bb..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/Constants.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.alert.utils; - -/** - * constants - */ -public class Constants { - - /** - * alert properties path - */ - public static final String ALERT_PROPERTIES_PATH = "/alert.properties"; - - public static final String DATA_SOURCE_PROPERTIES_PATH = "/dao/data_source.properties__"; - - public static final String SINGLE_SLASH = "/"; - - /** - * UTF-8 - */ - public static final String UTF_8 = "UTF-8"; - - public static final String STATUS = "status"; - - public static final String MESSAGE = "message"; - - public static final String MAIL_PROTOCOL = "mail.protocol"; - - public static final String MAIL_SERVER_HOST = "mail.server.host"; - - public static final String MAIL_SERVER_PORT = "mail.server.port"; - - public static final String MAIL_SENDER = "mail.sender"; - - public static final String MAIL_USER = "mail.user"; - - public static final String MAIL_PASSWD = "mail.passwd"; - - public static final String XLS_FILE_PATH = "xls.file.path"; - - public static final String MAIL_HOST = "mail.smtp.host"; - - public static final String MAIL_PORT = "mail.smtp.port"; - - public static final String MAIL_SMTP_AUTH = "mail.smtp.auth"; - - public static final String MAIL_TRANSPORT_PROTOCOL = "mail.transport.protocol"; - - public static final String MAIL_SMTP_STARTTLS_ENABLE = "mail.smtp.starttls.enable"; - - public static final String MAIL_SMTP_SSL_ENABLE = "mail.smtp.ssl.enable"; - - public static final String MAIL_SMTP_SSL_TRUST="mail.smtp.ssl.trust"; - - public static final String TEXT_HTML_CHARSET_UTF_8 = "text/html;charset=utf-8"; - - public static final String STRING_TRUE = "true"; - - public static final String EXCEL_SUFFIX_XLS = ".xls"; - - public static final int NUMBER_1000 = 1000; - - public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name"; - - public static final String SPRING_DATASOURCE_URL = "spring.datasource.url"; - - public static final String SPRING_DATASOURCE_USERNAME = "spring.datasource.username"; 
- - public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password"; - - public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout"; - - public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize"; - - public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle"; - - public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive"; - - public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait"; - - public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = "spring.datasource.timeBetweenEvictionRunsMillis"; - - public static final String SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis"; - - public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery"; - - public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle"; - - public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow"; - - public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn"; - - public static final String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements"; - - public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit"; - - public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive"; - - public static final String SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize"; - - public static final String DEVELOPMENT = "development"; - - public static final String CLASSPATH_MAIL_TEMPLATES_ALERT_MAIL_TEMPLATE_FTL = "classpath:mail_templates/alert_mail_template.ftl"; - - public static final String TR = ""; - - public static final 
String TD = ""; - - public static final String TD_END = ""; - - public static final String TR_END = ""; - - public static final String TITLE = "title"; - - public static final String CONTENT = "content"; - - public static final String TH = ""; - - public static final String TH_END = ""; - - public static final int ALERT_SCAN_INTERVEL = 5000; - - public static final String MARKDOWN_QUOTE = ">"; - - public static final String MARKDOWN_ENTER = "\n"; - - public static final String ENTERPRISE_WECHAT_CORP_ID = "enterprise.wechat.corp.id"; - - public static final String ENTERPRISE_WECHAT_SECRET = "enterprise.wechat.secret"; - - public static final String ENTERPRISE_WECHAT_TOKEN_URL = "enterprise.wechat.token.url"; - - public static final String ENTERPRISE_WECHAT_PUSH_URL = "enterprise.wechat.push.url"; - - public static final String ENTERPRISE_WECHAT_TEAM_SEND_MSG = "enterprise.wechat.team.send.msg"; - - public static final String ENTERPRISE_WECHAT_USER_SEND_MSG = "enterprise.wechat.user.send.msg"; - - public static final String ENTERPRISE_WECHAT_AGENT_ID = "enterprise.wechat.agent.id"; - - public static final String ENTERPRISE_WECHAT_USERS = "enterprise.wechat.users"; -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/EnterpriseWeChatUtils.java b/escheduler-alert/src/main/java/cn/escheduler/alert/utils/EnterpriseWeChatUtils.java deleted file mode 100644 index dc066463fd..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/EnterpriseWeChatUtils.java +++ /dev/null @@ -1,248 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.alert.utils; - -import cn.escheduler.common.enums.ShowType; -import cn.escheduler.dao.entity.Alert; -import com.alibaba.fastjson.JSON; - -import com.google.common.reflect.TypeToken; -import org.apache.commons.lang3.StringUtils; -import org.apache.http.HttpEntity; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.util.EntityUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.*; - -import static cn.escheduler.alert.utils.PropertyUtils.getString; - -/** - * Enterprise WeChat utils - */ -public class EnterpriseWeChatUtils { - - public static final Logger logger = LoggerFactory.getLogger(EnterpriseWeChatUtils.class); - - private static final String enterpriseWeChatCorpId = getString(Constants.ENTERPRISE_WECHAT_CORP_ID); - - private static final String enterpriseWeChatSecret = getString(Constants.ENTERPRISE_WECHAT_SECRET); - - private static final String enterpriseWeChatTokenUrl = getString(Constants.ENTERPRISE_WECHAT_TOKEN_URL); - private static String enterpriseWeChatTokenUrlReplace = enterpriseWeChatTokenUrl - .replaceAll("\\$corpId", enterpriseWeChatCorpId) - .replaceAll("\\$secret", enterpriseWeChatSecret); - - private static final String enterpriseWeChatPushUrl = 
getString(Constants.ENTERPRISE_WECHAT_PUSH_URL); - - private static final String enterpriseWeChatTeamSendMsg = getString(Constants.ENTERPRISE_WECHAT_TEAM_SEND_MSG); - - private static final String enterpriseWeChatUserSendMsg = getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG); - - public static final String enterpriseWeChatAgentId = getString(Constants.ENTERPRISE_WECHAT_AGENT_ID); - - public static final String enterpriseWeChatUsers = getString(Constants.ENTERPRISE_WECHAT_USERS); - - /** - * get Enterprise WeChat token info - * @return token string info - * @throws IOException - */ - public static String getToken() throws IOException { - String resp; - - CloseableHttpClient httpClient = HttpClients.createDefault(); - HttpGet httpGet = new HttpGet(enterpriseWeChatTokenUrlReplace); - CloseableHttpResponse response = httpClient.execute(httpGet); - try { - HttpEntity entity = response.getEntity(); - resp = EntityUtils.toString(entity, Constants.UTF_8); - EntityUtils.consume(entity); - } finally { - response.close(); - } - - Map map = JSON.parseObject(resp, - new TypeToken>() { - }.getType()); - return map.get("access_token").toString(); - } - - /** - * make team single Enterprise WeChat message - * @param toParty - * @param agentId - * @param msg - * @return Enterprise WeChat send message - */ - public static String makeTeamSendMsg(String toParty, String agentId, String msg) { - return enterpriseWeChatTeamSendMsg.replaceAll("\\$toParty", toParty) - .replaceAll("\\$agentId", agentId) - .replaceAll("\\$msg", msg); - } - - /** - * make team multi Enterprise WeChat message - * @param toParty - * @param agentId - * @param msg - * @return Enterprise WeChat send message - */ - public static String makeTeamSendMsg(Collection toParty, String agentId, String msg) { - String listParty = FuncUtils.mkString(toParty, "|"); - return enterpriseWeChatTeamSendMsg.replaceAll("\\$toParty", listParty) - .replaceAll("\\$agentId", agentId) - .replaceAll("\\$msg", msg); - } - - /** - * 
make team single user message - * @param toUser - * @param agentId - * @param msg - * @return Enterprise WeChat send message - */ - public static String makeUserSendMsg(String toUser, String agentId, String msg) { - return enterpriseWeChatUserSendMsg.replaceAll("\\$toUser", toUser) - .replaceAll("\\$agentId", agentId) - .replaceAll("\\$msg", msg); - } - - /** - * make team multi user message - * @param toUser - * @param agentId - * @param msg - * @return Enterprise WeChat send message - */ - public static String makeUserSendMsg(Collection toUser, String agentId, String msg) { - String listUser = FuncUtils.mkString(toUser, "|"); - return enterpriseWeChatUserSendMsg.replaceAll("\\$toUser", listUser) - .replaceAll("\\$agentId", agentId) - .replaceAll("\\$msg", msg); - } - - /** - * send Enterprise WeChat - * @param charset - * @param data - * @param token - * @return Enterprise WeChat resp, demo: {"errcode":0,"errmsg":"ok","invaliduser":""} - * @throws IOException - */ - public static String sendEnterpriseWeChat(String charset, String data, String token) throws IOException { - String enterpriseWeChatPushUrlReplace = enterpriseWeChatPushUrl.replaceAll("\\$token", token); - - CloseableHttpClient httpclient = HttpClients.createDefault(); - HttpPost httpPost = new HttpPost(enterpriseWeChatPushUrlReplace); - httpPost.setEntity(new StringEntity(data, charset)); - CloseableHttpResponse response = httpclient.execute(httpPost); - String resp; - try { - HttpEntity entity = response.getEntity(); - resp = EntityUtils.toString(entity, charset); - EntityUtils.consume(entity); - } finally { - response.close(); - } - logger.info("Enterprise WeChat send [{}], param:{}, resp:{}", enterpriseWeChatPushUrl, data, resp); - return resp; - } - - /** - * convert table to markdown style - * @param title - * @param content - * @return - */ - public static String markdownTable(String title,String content){ - List mapItemsList = JSONUtils.toList(content, LinkedHashMap.class); - StringBuilder 
contents = new StringBuilder(200); - for (LinkedHashMap mapItems : mapItemsList){ - - Set> entries = mapItems.entrySet(); - - Iterator> iterator = entries.iterator(); - - StringBuilder t = new StringBuilder(String.format("`%s`%s",title,Constants.MARKDOWN_ENTER)); - while (iterator.hasNext()){ - - Map.Entry entry = iterator.next(); - t.append(Constants.MARKDOWN_QUOTE); - t.append(entry.getKey()).append(":").append(entry.getValue()); - t.append(Constants.MARKDOWN_ENTER); - } - - contents.append(t); - } - return contents.toString(); - } - - /** - * convert text to markdown style - * @param title - * @param content - * @return - */ - public static String markdownText(String title,String content){ - if (StringUtils.isNotEmpty(content)){ - List list; - try { - list = JSONUtils.toList(content,String.class); - }catch (Exception e){ - logger.error("json format exception",e); - return null; - } - - StringBuilder contents = new StringBuilder(100); - contents.append(String.format("`%s`\n",title)); - for (String str : list){ - contents.append(Constants.MARKDOWN_QUOTE); - contents.append(str); - contents.append(Constants.MARKDOWN_ENTER); - } - - return contents.toString(); - - } - return null; - } - - /** - * Determine the mardown style based on the show type of the alert - * @param alert - * @return - */ - public static String markdownByAlert(Alert alert){ - String result = ""; - if (alert.getShowType() == ShowType.TABLE) { - result = markdownTable(alert.getTitle(),alert.getContent()); - }else if(alert.getShowType() == ShowType.TEXT){ - result = markdownText(alert.getTitle(),alert.getContent()); - } - return result; - - } - -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/ExcelUtils.java b/escheduler-alert/src/main/java/cn/escheduler/alert/utils/ExcelUtils.java deleted file mode 100644 index ef9fe61932..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/ExcelUtils.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.alert.utils; - -import org.apache.poi.hssf.usermodel.HSSFCell; -import org.apache.poi.hssf.usermodel.HSSFRow; -import org.apache.poi.hssf.usermodel.HSSFSheet; -import org.apache.poi.hssf.usermodel.HSSFWorkbook; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.FileOutputStream; -import java.io.IOException; -import java.util.*; - -/** - * excel utils - */ -public class ExcelUtils { - - private static final Logger logger = LoggerFactory.getLogger(ExcelUtils.class); - /** - * generate excel file - * @param content - * @param title - * @param xlsFilePath - * @return - * @throws Exception - */ - public static void genExcelFile(String content,String title,String xlsFilePath){ - List itemsList; - try { - itemsList = JSONUtils.toList(content, LinkedHashMap.class); - }catch (Exception e){ - logger.error(String.format("json format incorrect : %s",content),e); - throw new RuntimeException("json format incorrect",e); - } - - if (itemsList == null || itemsList.size() == 0){ - logger.error("itemsList is null"); - throw new RuntimeException("itemsList is null"); - } - - LinkedHashMap headerMap = itemsList.get(0); - - List headerList = new ArrayList<>(); - - 
Iterator> iter = headerMap.entrySet().iterator(); - while (iter.hasNext()){ - Map.Entry en = iter.next(); - headerList.add(en.getKey()); - } - - HSSFWorkbook wb = null; - FileOutputStream fos = null; - try { - // declare a workbook - wb = new HSSFWorkbook(); - // generate a table - HSSFSheet sheet = wb.createSheet(); - HSSFRow row = sheet.createRow(0); - //set the height of the first line - row.setHeight((short)500); - - - //setting excel headers - for (int i = 0; i < headerList.size(); i++) { - HSSFCell cell = row.createCell(i); - cell.setCellValue(headerList.get(i)); - } - - //setting excel body - int rowIndex = 1; - for (LinkedHashMap itemsMap : itemsList){ - Object[] values = itemsMap.values().toArray(); - row = sheet.createRow(rowIndex); - //setting excel body height - row.setHeight((short)500); - rowIndex++; - for (int j = 0 ; j < values.length ; j++){ - HSSFCell cell1 = row.createCell(j); - cell1.setCellValue(String.valueOf(values[j])); - } - } - - for (int i = 0; i < headerList.size(); i++) { - sheet.setColumnWidth(i, headerList.get(i).length() * 800); - - } - - //setting file output - fos = new FileOutputStream(xlsFilePath + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS); - - wb.write(fos); - - }catch (Exception e){ - logger.error("generate excel error",e); - throw new RuntimeException("generate excel error",e); - }finally { - if (wb != null){ - try { - wb.close(); - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - } - if (fos != null){ - try { - fos.close(); - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - } - } - } - -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/FuncUtils.java b/escheduler-alert/src/main/java/cn/escheduler/alert/utils/FuncUtils.java deleted file mode 100644 index c9dbe1d676..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/FuncUtils.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or 
more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.alert.utils; - -public class FuncUtils { - - static public String mkString(Iterable list, String split) { - StringBuilder sb = new StringBuilder(); - boolean first = true; - for (String item : list) { - if (first) { - first = false; - } else { - sb.append(split); - } - sb.append(item); - } - return sb.toString(); - } - -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/JSONUtils.java b/escheduler-alert/src/main/java/cn/escheduler/alert/utils/JSONUtils.java deleted file mode 100644 index 6c1c468475..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/JSONUtils.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.alert.utils; - -import com.alibaba.fastjson.JSONArray; -import com.alibaba.fastjson.JSONObject; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - -/** - * json utils - */ -public class JSONUtils { - - private static final Logger logger = LoggerFactory.getLogger(JSONUtils.class); - - /** - * object to json string - * @param object - * @return json string - */ - public static String toJsonString(Object object) { - try{ - return JSONObject.toJSONString(object,false); - } catch (Exception e) { - throw new RuntimeException("Json deserialization exception.", e); - } - } - - /** - * json to list - * - * @param json - * @param clazz c - * @param - * @return - */ - public static List toList(String json, Class clazz) { - if (StringUtils.isEmpty(json)) { - return null; - } - try { - return JSONArray.parseArray(json, clazz); - } catch (Exception e) { - logger.error("JSONArray.parseArray exception!",e); - } - - return null; - } - -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/MailUtils.java b/escheduler-alert/src/main/java/cn/escheduler/alert/utils/MailUtils.java deleted file mode 100644 index 82add04daf..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/MailUtils.java +++ /dev/null @@ -1,459 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.alert.utils; - -import cn.escheduler.common.enums.ShowType; -import freemarker.cache.StringTemplateLoader; -import freemarker.template.Configuration; -import freemarker.template.Template; -import freemarker.template.TemplateException; -import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.mail.EmailException; -import org.apache.commons.mail.HtmlEmail; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.util.ResourceUtils; - -import javax.mail.*; -import javax.mail.internet.*; -import java.io.*; -import java.security.Security; -import java.util.*; - -import static cn.escheduler.alert.utils.PropertyUtils.getBoolean; -import static cn.escheduler.alert.utils.PropertyUtils.getInt; -import static cn.escheduler.alert.utils.PropertyUtils.getString; - - -/** - * mail utils - */ -public class MailUtils { - - public static final Logger logger = LoggerFactory.getLogger(MailUtils.class); - - public static final String mailProtocol = getString(Constants.MAIL_PROTOCOL); - - public static final String mailServerHost = getString(Constants.MAIL_SERVER_HOST); - - public static final Integer mailServerPort = 
getInt(Constants.MAIL_SERVER_PORT); - - public static final String mailSender = getString(Constants.MAIL_SENDER); - - public static final String mailUser = getString(Constants.MAIL_USER); - - public static final String mailPasswd = getString(Constants.MAIL_PASSWD); - - public static final Boolean mailUseStartTLS = getBoolean(Constants.MAIL_SMTP_STARTTLS_ENABLE); - - public static final Boolean mailUseSSL = getBoolean(Constants.MAIL_SMTP_SSL_ENABLE); - - public static final String xlsFilePath = getString(Constants.XLS_FILE_PATH); - - public static final String starttlsEnable = getString(Constants.MAIL_SMTP_STARTTLS_ENABLE); - - public static final String sslEnable = getString(Constants.MAIL_SMTP_SSL_ENABLE); - - public static final String sslTrust = getString(Constants.MAIL_SMTP_SSL_TRUST); - - private static Template MAIL_TEMPLATE; - - static { - Configuration cfg = new Configuration(Configuration.VERSION_2_3_21); - cfg.setDefaultEncoding(Constants.UTF_8); - StringTemplateLoader stringTemplateLoader = new StringTemplateLoader(); - cfg.setTemplateLoader(stringTemplateLoader); - InputStreamReader isr = null; - try { - isr = new InputStreamReader(new FileInputStream(ResourceUtils.getFile(Constants.CLASSPATH_MAIL_TEMPLATES_ALERT_MAIL_TEMPLATE_FTL)), - Constants.UTF_8); - - MAIL_TEMPLATE = new Template("alert_mail_template", isr, cfg); - } catch (Exception e) { - MAIL_TEMPLATE = null; - } finally { - IOUtils.closeQuietly(isr); - } - } - - - /** - * send mail to receivers - * - * @param receivers - * @param title - * @param content - * @return - */ - public static Map sendMails(Collection receivers, String title, String content,ShowType showType) { - return sendMails(receivers, null, title, content, showType); - } - - /** - * send mail - * @param receivers - * @param receiversCc cc - * @param title title - * @param content content - * @param showType mail type - * @return - */ - public static Map sendMails(Collection receivers, Collection receiversCc, String title, 
String content, ShowType showType) { - Map retMap = new HashMap<>(); - retMap.put(Constants.STATUS, false); - - // if there is no receivers && no receiversCc, no need to process - if (CollectionUtils.isEmpty(receivers) && CollectionUtils.isEmpty(receiversCc)) { - return retMap; - } - - receivers.removeIf((from) -> (StringUtils.isEmpty(from))); - - if (showType == ShowType.TABLE || showType == ShowType.TEXT){ - // send email - HtmlEmail email = new HtmlEmail(); - - try { - Session session = getSession(); - email.setMailSession(session); - email.setFrom(mailSender); - email.setCharset(Constants.UTF_8); - if (CollectionUtils.isNotEmpty(receivers)){ - // receivers mail - for (String receiver : receivers) { - email.addTo(receiver); - } - } - - if (CollectionUtils.isNotEmpty(receiversCc)){ - //cc - for (String receiverCc : receiversCc) { - email.addCc(receiverCc); - } - } - // sender mail - return getStringObjectMap(title, content, showType, retMap, email); - } catch (Exception e) { - handleException(receivers, retMap, e); - } - }else if (showType == ShowType.ATTACHMENT || showType == ShowType.TABLEATTACHMENT){ - try { - - String partContent = (showType == ShowType.ATTACHMENT ? 
"Please see the attachment " + title + Constants.EXCEL_SUFFIX_XLS : htmlTable(content,false)); - - attachment(receivers,receiversCc,title,content,partContent); - - retMap.put(Constants.STATUS, true); - return retMap; - }catch (Exception e){ - handleException(receivers, retMap, e); - return retMap; - } - } - return retMap; - - } - - /** - * html table content - * @param content - * @param showAll - * @return - */ - private static String htmlTable(String content, boolean showAll){ - if (StringUtils.isNotEmpty(content)){ - List mapItemsList = JSONUtils.toList(content, LinkedHashMap.class); - - if(!showAll && mapItemsList.size() > Constants.NUMBER_1000){ - mapItemsList = mapItemsList.subList(0,Constants.NUMBER_1000); - } - - StringBuilder contents = new StringBuilder(200); - - boolean flag = true; - - String title = ""; - for (LinkedHashMap mapItems : mapItemsList){ - - Set> entries = mapItems.entrySet(); - - Iterator> iterator = entries.iterator(); - - StringBuilder t = new StringBuilder(Constants.TR); - StringBuilder cs = new StringBuilder(Constants.TR); - while (iterator.hasNext()){ - - Map.Entry entry = iterator.next(); - t.append(Constants.TH).append(entry.getKey()).append(Constants.TH_END); - cs.append(Constants.TD).append(String.valueOf(entry.getValue())).append(Constants.TD_END); - - } - t.append(Constants.TR_END); - cs.append(Constants.TR_END); - if (flag){ - title = t.toString(); - } - flag = false; - contents.append(cs); - } - - return getTemplateContent(title,contents.toString()); - } - - return null; - } - - /** - * html table content - * @param content - * @return - */ - private static String htmlTable(String content){ - return htmlTable(content,true); - } - - /** - * html text content - * @param content - * @return - */ - private static String htmlText(String content){ - - if (StringUtils.isNotEmpty(content)){ - List list; - try { - list = JSONUtils.toList(content,String.class); - }catch (Exception e){ - logger.error("json format exception",e); - return 
null; - } - - StringBuilder contents = new StringBuilder(100); - for (String str : list){ - contents.append(Constants.TR); - contents.append(Constants.TD).append(str).append(Constants.TD_END); - contents.append(Constants.TR_END); - } - - return getTemplateContent(null,contents.toString()); - - } - - return null; - } - - - - - /** - * send mail as Excel attachment - * - * @param receivers - * @param title - * @throws Exception - */ - private static void attachment(Collection receivers,Collection receiversCc,String title,String content,String partContent)throws Exception{ - MimeMessage msg = getMimeMessage(receivers); - - attachContent(receiversCc, title, content,partContent, msg); - } - - /** - * get MimeMessage - * @param receivers - * @return - * @throws MessagingException - */ - private static MimeMessage getMimeMessage(Collection receivers) throws MessagingException { -// Security.addProvider(new com.sun.net.ssl.internal.ssl.Provider()); -// final String SSL_FACTORY = "javax.net.ssl.SSLSocketFactory"; - - // 1. The first step in creating mail: creating session - Session session = getSession(); - // Setting debug mode, can be turned off - session.setDebug(false); - - // 2. creating mail: Creating a MimeMessage - MimeMessage msg = new MimeMessage(session); - // 3. set sender - msg.setFrom(new InternetAddress(mailSender)); - // 4. 
set receivers - for (String receiver : receivers) { - msg.addRecipients(MimeMessage.RecipientType.TO, InternetAddress.parse(receiver)); - } - return msg; - } - - /** - * get session - * @return - */ - private static Session getSession() { - Properties props = new Properties(); - props.setProperty(Constants.MAIL_HOST, mailServerHost); - props.setProperty(Constants.MAIL_PORT, String.valueOf(mailServerPort)); - props.setProperty(Constants.MAIL_SMTP_AUTH, Constants.STRING_TRUE); - props.setProperty(Constants.MAIL_TRANSPORT_PROTOCOL, mailProtocol); - props.setProperty(Constants.MAIL_SMTP_STARTTLS_ENABLE, starttlsEnable); - props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, sslEnable); - props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, sslTrust); - - Authenticator auth = new Authenticator() { - @Override - protected PasswordAuthentication getPasswordAuthentication() { - // mail username and password - return new PasswordAuthentication(mailUser, mailPasswd); - } - }; - - Session session = Session.getInstance(props, auth); - return session; - } - - /** - * - * @param receiversCc - * @param title - * @param content - * @param partContent - * @param msg - * @throws MessagingException - * @throws IOException - */ - private static void attachContent(Collection receiversCc, String title, String content, String partContent,MimeMessage msg) throws MessagingException, IOException { - /** - * set receiverCc - */ - if(CollectionUtils.isNotEmpty(receiversCc)){ - for (String receiverCc : receiversCc){ - msg.addRecipients(MimeMessage.RecipientType.CC, InternetAddress.parse(receiverCc)); - } - } - - // set receivers type to cc - // msg.addRecipients(MimeMessage.RecipientType.CC, InternetAddress.parse(propMap.get("${CC}"))); - // set subject - msg.setSubject(title); - MimeMultipart partList = new MimeMultipart(); - // set signature - MimeBodyPart part1 = new MimeBodyPart(); - part1.setContent(partContent, Constants.TEXT_HTML_CHARSET_UTF_8); - // set attach file - MimeBodyPart part2 = new 
MimeBodyPart(); - // make excel file - ExcelUtils.genExcelFile(content,title,xlsFilePath); - File file = new File(xlsFilePath + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS); - part2.attachFile(file); - part2.setFileName(MimeUtility.encodeText(title + Constants.EXCEL_SUFFIX_XLS)); - // add components to collection - partList.addBodyPart(part1); - partList.addBodyPart(part2); - msg.setContent(partList); - // 5. send Transport - Transport.send(msg); - // 6. delete saved file - deleteFile(file); - } - - /** - * - * @param title - * @param content - * @param showType - * @param retMap - * @param email - * @return - * @throws EmailException - */ - private static Map getStringObjectMap(String title, String content, ShowType showType, Map retMap, HtmlEmail email) throws EmailException { - - /** - * the subject of the message to be sent - */ - email.setSubject(title); - /** - * to send information, you can use HTML tags in mail content because of the use of HtmlEmail - */ - if (showType == ShowType.TABLE) { - email.setMsg(htmlTable(content)); - } else if (showType == ShowType.TEXT) { - email.setMsg(htmlText(content)); - } - - // send - email.send(); - - retMap.put(Constants.STATUS, true); - - return retMap; - } - - /** - * file delete - * @param file - */ - public static void deleteFile(File file){ - if(file.exists()){ - if(file.delete()){ - logger.info("delete success:"+file.getAbsolutePath()+file.getName()); - }else{ - logger.info("delete fail"+file.getAbsolutePath()+file.getName()); - } - }else{ - logger.info("file not exists:"+file.getAbsolutePath()+file.getName()); - } - } - - - /** - * - * @param receivers - * @param retMap - * @param e - */ - private static void handleException(Collection receivers, Map retMap, Exception e) { - logger.error("Send email to {} failed", StringUtils.join(",", receivers), e); - retMap.put(Constants.MESSAGE, "Send email to {" + StringUtils.join(",", receivers) + "} failed," + e.toString()); - } - - /** - * - * @param title 
- * @param content - * @return - */ - private static String getTemplateContent(String title,String content){ - StringWriter out = new StringWriter(); - Map map = new HashMap<>(); - if(null != title){ - map.put(Constants.TITLE,title); - } - map.put(Constants.CONTENT,content); - try { - MAIL_TEMPLATE.process(map, out); - return out.toString(); - } catch (TemplateException e) { - logger.error(e.getMessage(),e); - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - - return null; - } -} diff --git a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/PropertyUtils.java b/escheduler-alert/src/main/java/cn/escheduler/alert/utils/PropertyUtils.java deleted file mode 100644 index 3b31a28146..0000000000 --- a/escheduler-alert/src/main/java/cn/escheduler/alert/utils/PropertyUtils.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.alert.utils; - -import org.apache.commons.io.IOUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Properties; - -import static cn.escheduler.alert.utils.Constants.ALERT_PROPERTIES_PATH; -import static cn.escheduler.alert.utils.Constants.DATA_SOURCE_PROPERTIES_PATH; - -/** - * property utils - * single instance - */ -public class PropertyUtils { - - /** - * logger - */ - private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class); - - private static final Properties properties = new Properties(); - - private static final PropertyUtils propertyUtils = new PropertyUtils(); - - private PropertyUtils(){ - init(); - } - - private void init(){ - String[] propertyFiles = new String[]{ALERT_PROPERTIES_PATH,DATA_SOURCE_PROPERTIES_PATH}; - for (String fileName : propertyFiles) { - InputStream fis = null; - try { - fis = PropertyUtils.class.getResourceAsStream(fileName); - properties.load(fis); - - } catch (IOException e) { - logger.error(e.getMessage(), e); - System.exit(1); - } finally { - IOUtils.closeQuietly(fis); - } - } - } - -/* - public static PropertyUtils getInstance(){ - return propertyUtils; - } -*/ - - /** - * get property value - * - * @param key property name - * @return - */ - public static String getString(String key) { - return properties.getProperty(key); - } - - /** - * get property value - * - * @param key property name - * @return get property int value , if key == null, then return -1 - */ - public static int getInt(String key) { - return getInt(key, -1); - } - - /** - * - * @param key - * @param defaultValue - * @return - */ - public static int getInt(String key, int defaultValue) { - String value = getString(key); - if (value == null) { - return defaultValue; - } - - try { - return Integer.parseInt(value); - } catch (NumberFormatException e) { - logger.info(e.getMessage(),e); - } - return defaultValue; - } - - 
/** - * get property value - * - * @param key property name - * @return - */ - public static Boolean getBoolean(String key) { - String value = properties.getProperty(key.trim()); - if(null != value){ - return Boolean.parseBoolean(value); - } - - return null; - } - - /** - * - * @param key - * @return - */ - public static long getLong(String key) { - return getLong(key,-1); - } - - /** - * - * @param key - * @param defaultVal - * @return - */ - public static long getLong(String key, long defaultVal) { - String val = getString(key); - return val == null ? defaultVal : Long.parseLong(val); - } - - - /** - * - * @param key - * @param defaultVal - * @return - */ - public double getDouble(String key, double defaultVal) { - String val = getString(key); - return val == null ? defaultVal : Double.parseDouble(val); - } - - - /** - * get array - * @param key property name - * @param splitStr separator - * @return - */ - public static String[] getArray(String key, String splitStr) { - String value = getString(key); - if (value == null) { - return null; - } - try { - String[] propertyArray = value.split(splitStr); - return propertyArray; - } catch (NumberFormatException e) { - logger.info(e.getMessage(),e); - } - return null; - } - - /** - * - * @param key - * @param type - * @param defaultValue - * @param - * @return get enum value - */ - public > T getEnum(String key, Class type, - T defaultValue) { - String val = getString(key); - return val == null ? 
defaultValue : Enum.valueOf(type, val); - } -} diff --git a/escheduler-alert/src/main/resources/alert_logback.xml b/escheduler-alert/src/main/resources/alert_logback.xml deleted file mode 100644 index c4ca8e9d1f..0000000000 --- a/escheduler-alert/src/main/resources/alert_logback.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - ${log.base}/escheduler-alert.log - - ${log.base}/escheduler-alert.%d{yyyy-MM-dd_HH}.%i.log - 20 - 64MB - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - \ No newline at end of file diff --git a/escheduler-alert/src/test/java/cn/escheduler/alert/utils/EnterpriseWeChatUtilsTest.java b/escheduler-alert/src/test/java/cn/escheduler/alert/utils/EnterpriseWeChatUtilsTest.java deleted file mode 100644 index a226cf3f01..0000000000 --- a/escheduler-alert/src/test/java/cn/escheduler/alert/utils/EnterpriseWeChatUtilsTest.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.alert.utils; - -import com.alibaba.fastjson.JSON; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; - -/** - * Please manually modify the configuration file before testing. - * file: alert.properties - * enterprise.wechat.corp.id - * enterprise.wechat.secret - * enterprise.wechat.token.url - * enterprise.wechat.push.url - * enterprise.wechat.send.msg - * enterprise.wechat.agent.id - * enterprise.wechat.users - */ -@Ignore -public class EnterpriseWeChatUtilsTest { - - private String agentId = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID); // app id - private Collection listUserId = Arrays.asList(PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS).split(",")); - - // Please change - private String partyId = "2"; - private Collection listPartyId = Arrays.asList("2","4"); - @Test - public void testSendSingleTeamWeChat() { - try { - String token = EnterpriseWeChatUtils.getToken(); - String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world"); - String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); - - String errmsg = JSON.parseObject(resp).getString("errmsg"); - Assert.assertEquals(errmsg, "ok"); - } catch (IOException e) { - e.printStackTrace(); - } - } - - @Test - public void testSendMultiTeamWeChat() { - - try { - String token = EnterpriseWeChatUtils.getToken(); - String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world"); - String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); - - String errmsg = JSON.parseObject(resp).getString("errmsg"); - Assert.assertEquals(errmsg, "ok"); - } catch (IOException e) { - e.printStackTrace(); - } - } - - @Test - public void testSendSingleUserWeChat() { - try { - String token = EnterpriseWeChatUtils.getToken(); - String msg = 
EnterpriseWeChatUtils.makeUserSendMsg(listUserId.stream().findFirst().get(), agentId, "您的会议室已经预定,稍后会同步到`邮箱` \n" + - ">**事项详情** \n" + - ">事 项:开会
" + - ">组织者:@miglioguan \n" + - ">参与者:@miglioguan、@kunliu、@jamdeezhou、@kanexiong、@kisonwang \n" + - "> \n" + - ">会议室:广州TIT 1楼 301 \n" + - ">日 期:2018年5月18日 \n" + - ">时 间:上午9:00-11:00 \n" + - "> \n" + - ">请准时参加会议。 \n" + - "> \n" + - ">如需修改会议信息,请点击:[修改会议信息](https://work.weixin.qq.com)\""); - - String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); - - String errmsg = JSON.parseObject(resp).getString("errmsg"); - Assert.assertEquals(errmsg, "ok"); - } catch (IOException e) { - e.printStackTrace(); - } - } - - @Test - public void testSendMultiUserWeChat() { - try { - String token = EnterpriseWeChatUtils.getToken(); - - String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world"); - String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); - - String errmsg = JSON.parseObject(resp).getString("errmsg"); - Assert.assertEquals(errmsg, "ok"); - } catch (IOException e) { - e.printStackTrace(); - } - } - -} diff --git a/escheduler-alert/src/test/java/cn/escheduler/alert/utils/MailUtilsTest.java b/escheduler-alert/src/test/java/cn/escheduler/alert/utils/MailUtilsTest.java deleted file mode 100644 index c7b6dd8670..0000000000 --- a/escheduler-alert/src/test/java/cn/escheduler/alert/utils/MailUtilsTest.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.alert.utils; - - -import cn.escheduler.common.enums.AlertType; -import cn.escheduler.common.enums.ShowType; -import cn.escheduler.dao.AlertDao; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.entity.Alert; -import cn.escheduler.dao.entity.User; -import freemarker.cache.StringTemplateLoader; -import freemarker.template.Configuration; -import freemarker.template.Template; -import freemarker.template.TemplateException; -import org.apache.commons.io.IOUtils; -import org.junit.Ignore; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.util.ResourceUtils; - -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.StringWriter; -import java.util.*; - - -/** - */ -@Ignore -public class MailUtilsTest { - private static final Logger logger = LoggerFactory.getLogger(MailUtilsTest.class); - @Test - public void testSendMails() { - String[] receivers = new String[]{"xx@xx.com"}; - String[] receiversCc = new String[]{"xxx@xxx.com"}; - - String content ="[\"id:69\"," + - "\"name:UserBehavior-0--1193959466\"," + - "\"Job name: 启动工作流\"," + - "\"State: SUCCESS\"," + - "\"Recovery:NO\"," + - "\"Run time: 1\"," + - "\"Start time: 2018-08-06 10:31:34.0\"," + - "\"End time: 2018-08-06 10:31:49.0\"," + - "\"Host: 192.168.xx.xx\"," + - "\"Notify group :4\"]"; - - Alert alert = new Alert(); - alert.setTitle("Mysql异常"); - alert.setShowType(ShowType.TEXT); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); 
- alert.setAlertGroupId(4); - - MailUtils.sendMails(Arrays.asList(receivers),Arrays.asList(receiversCc),alert.getTitle(),alert.getContent(), ShowType.TEXT); - } - - - @Test - public void testQuery(){ - AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); - List alerts = alertDao.listWaitExecutionAlert(); - - String[] mails = new String[]{"xx@xx.com"}; - - for(Alert alert : alerts){ - MailUtils.sendMails(Arrays.asList(mails),"gaojing", alert.getContent(), alert.getShowType()); - } - - } - - public String list2String(){ - - LinkedHashMap map1 = new LinkedHashMap<>(); - map1.put("mysql服务名称","mysql200"); - map1.put("mysql地址","192.168.xx.xx"); - map1.put("端口","3306"); - map1.put("期间内没有使用索引的查询数握","80"); - map1.put("数据库客户端连接数","190"); - - LinkedHashMap map2 = new LinkedHashMap<>(); - map2.put("mysql服务名称","mysql210"); - map2.put("mysql地址","192.168.xx.xx"); - map2.put("端口","3306"); - map2.put("期间内没有使用索引的查询数握","10"); - map2.put("数据库客户端连接数","90"); - - List> maps = new ArrayList<>(); - maps.add(0,map1); - maps.add(1,map2); - String mapjson = JSONUtils.toJsonString(maps); - logger.info(mapjson); - - return mapjson; - - } - - @Test - public void testSendTableMail(){ - String[] mails = new String[]{"xx@xx.com"}; - Alert alert = new Alert(); - alert.setTitle("Mysql Exception"); - alert.setShowType(ShowType.TABLE); - String content= list2String(); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(1); - MailUtils.sendMails(Arrays.asList(mails),"gaojing", alert.getContent(), ShowType.TABLE); - } - - /** - * Used to test add alarm information, mail sent - * Text - */ - @Test - public void addAlertText(){ - AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); - Alert alert = new Alert(); - alert.setTitle("Mysql Exception"); - alert.setShowType(ShowType.TEXT); - alert.setContent("[\"告警时间:2018-02-05\", \"服务名:MYSQL_ALTER\", \"告警名:MYSQL_ALTER_DUMP\", \"获取告警异常!,接口报错,异常信息:timed out\", 
\"请求地址:http://blog.csdn.net/dreamInTheWorld/article/details/78539286\"]"); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(1); - alertDao.addAlert(alert); - } - - - /** - * Used to test add alarm information, mail sent - * Table - */ - @Test - public void addAlertTable(){ - AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); - Alert alert = new Alert(); - alert.setTitle("Mysql Exception"); - alert.setShowType(ShowType.TABLE); - - String content = list2String(); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(1); - alertDao.addAlert(alert); - } - - @Test - public void testAlertDao(){ - AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); - List users = alertDao.listUserByAlertgroupId(3); - logger.info(users.toString()); - } - - @Test - public void testAttachmentFile()throws Exception{ - String[] mails = new String[]{"xx@xx.com"}; - Alert alert = new Alert(); - alert.setTitle("Mysql Exception"); - alert.setShowType(ShowType.ATTACHMENT); - String content = list2String(); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(1); - MailUtils.sendMails(Arrays.asList(mails),"gaojing",alert.getContent(),ShowType.ATTACHMENT); - } - - @Test - public void testTableAttachmentFile()throws Exception{ - String[] mails = new String[]{"xx@xx.com"}; - Alert alert = new Alert(); - alert.setTitle("Mysql Exception"); - alert.setShowType(ShowType.TABLEATTACHMENT); - String content = list2String(); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(1); - MailUtils.sendMails(Arrays.asList(mails),"gaojing",alert.getContent(),ShowType.TABLEATTACHMENT); - } - - @Test - public void template(){ - Template MAIL_TEMPLATE; - Configuration cfg = new Configuration(Configuration.VERSION_2_3_21); - cfg.setDefaultEncoding(Constants.UTF_8); - StringTemplateLoader stringTemplateLoader = new StringTemplateLoader(); - 
cfg.setTemplateLoader(stringTemplateLoader); - InputStreamReader isr = null; - try { - isr = new InputStreamReader(new FileInputStream(ResourceUtils.getFile(Constants.CLASSPATH_MAIL_TEMPLATES_ALERT_MAIL_TEMPLATE_FTL)), - Constants.UTF_8); - - MAIL_TEMPLATE = new Template("alert_mail_template", isr, cfg); - } catch (Exception e) { - MAIL_TEMPLATE = null; - } finally { - IOUtils.closeQuietly(isr); - } - - - StringWriter out = new StringWriter(); - Map map = new HashMap<>(); - map.put(Constants.TITLE,"title_test"); - try { - MAIL_TEMPLATE.process(map, out); - logger.info(out.toString()); - - } catch (TemplateException e) { - logger.error(e.getMessage(),e); - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - - } - -} diff --git a/escheduler-api/pom.xml b/escheduler-api/pom.xml deleted file mode 100644 index 766993ca4e..0000000000 --- a/escheduler-api/pom.xml +++ /dev/null @@ -1,219 +0,0 @@ - - 4.0.0 - - cn.analysys - escheduler - 1.1.0-SNAPSHOT - - escheduler-api - jar - - - - cn.analysys - escheduler-alert - - - cn.analysys - escheduler-server - - - io.netty - netty - - - io.netty - netty-all - - - com.google - netty - - - leveldbjni-all - org.fusesource.leveldbjni - - - protobuf-java - com.google.protobuf - - - - - - - org.springframework.boot - spring-boot-starter-web - - - org.springframework.boot - spring-boot-starter-tomcat - - - log4j-to-slf4j - org.apache.logging.log4j - - - - - - - org.springframework.boot - spring-boot-starter-jetty - - - org.eclipse.jetty.websocket - javax-websocket-server-impl - - - org.eclipse.jetty.websocket - websocket-server - - - - - - org.springframework.boot - spring-boot-starter-test - test - - - org.ow2.asm - asm - - - org.springframework.boot - spring-boot - - - org.springframework.boot - spring-boot-autoconfigure - - - - - org.springframework.boot - spring-boot-starter-aop - - - org.springframework.boot - spring-boot-starter - - - - - org.springframework - spring-context - - - - org.apache.httpcomponents - 
httpcore - - - org.apache.httpcomponents - httpclient - - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-core - - - - com.alibaba - fastjson - - - - commons-collections - commons-collections - - - - org.quartz-scheduler - quartz - - - c3p0 - c3p0 - - - - - - org.quartz-scheduler - quartz-jobs - - - - io.springfox - springfox-swagger2 - 2.9.2 - - - - io.springfox - springfox-swagger-ui - 2.9.2 - - - - com.github.xiaoymin - swagger-bootstrap-ui - 1.9.3 - - - - cn.analysys - escheduler-rpc - - - - junit - junit - 4.12 - test - - - - - - maven-assembly-plugin - 2.6 - - - src/main/assembly/package.xml - - false - - - - make-assembly - package - - single - - - - - - org.apache.maven.plugins - maven-compiler-plugin - - ${java.version} - ${java.version} - ${project.build.sourceEncoding} - - - - - \ No newline at end of file diff --git a/escheduler-api/src/main/assembly/package.xml b/escheduler-api/src/main/assembly/package.xml deleted file mode 100644 index 0d3c6ea573..0000000000 --- a/escheduler-api/src/main/assembly/package.xml +++ /dev/null @@ -1,74 +0,0 @@ - - cluster - - dir - - false - - - src/main/resources - - **/*.properties - **/*.xml - **/*.json - - conf - - - ${project.parent.basedir}/escheduler-common/src/main/resources - - **/*.properties - **/*.xml - **/*.json - - conf - - - ${project.parent.basedir}/escheduler-common/src/main/resources/bin - - *.* - - 755 - bin - - - ${project.parent.basedir}/escheduler-dao/src/main/resources - - **/*.properties - **/*.xml - **/*.json - - conf - - - ${project.parent.basedir}/escheduler-api/src/main/resources - - **/*.properties - **/*.xml - **/*.json - - conf - - - target/ - - escheduler-api-${project.version}.jar - - lib - - - - - lib - true - - javax.servlet:servlet-api - org.eclipse.jetty.aggregate:jetty-all - org.slf4j:slf4j-log4j12 - - - - \ No newline at end of file diff --git 
a/escheduler-api/src/main/java/cn/escheduler/api/ApiApplicationServer.java b/escheduler-api/src/main/java/cn/escheduler/api/ApiApplicationServer.java deleted file mode 100644 index 9b84c7f2f7..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/ApiApplicationServer.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api; - -import org.springframework.boot.SpringApplication; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.web.servlet.ServletComponentScan; -import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; -import org.springframework.context.annotation.ComponentScan; -import springfox.documentation.swagger2.annotations.EnableSwagger2; - -@SpringBootApplication -@ServletComponentScan -@ComponentScan("cn.escheduler") -@EnableSwagger2 -public class ApiApplicationServer extends SpringBootServletInitializer { - - public static void main(String[] args) { - SpringApplication.run(ApiApplicationServer.class, args); - } - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/CombinedApplicationServer.java b/escheduler-api/src/main/java/cn/escheduler/api/CombinedApplicationServer.java deleted file mode 100644 index fc421944e9..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/CombinedApplicationServer.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api; - -import cn.escheduler.alert.AlertServer; -import cn.escheduler.dao.AlertDao; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.server.master.MasterServer; -import cn.escheduler.server.rpc.LoggerServer; -import cn.escheduler.server.worker.WorkerServer; -import org.springframework.boot.SpringApplication; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.web.servlet.ServletComponentScan; -import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; -import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.context.annotation.ComponentScan; -import springfox.documentation.swagger2.annotations.EnableSwagger2; - -@SpringBootApplication -@ServletComponentScan -@ComponentScan("cn.escheduler") -@EnableSwagger2 -public class CombinedApplicationServer extends SpringBootServletInitializer { - - public static void main(String[] args) throws Exception { - - ConfigurableApplicationContext context = SpringApplication.run(ApiApplicationServer.class, args); - ProcessDao processDao = context.getBean(ProcessDao.class); - AlertDao alertDao = context.getBean(AlertDao.class); - - MasterServer master = new MasterServer(processDao); - master.run(processDao); - - WorkerServer workerServer = new WorkerServer(processDao, alertDao); - workerServer.run(processDao, alertDao); - - LoggerServer server = new LoggerServer(); - server.start(); - - AlertServer alertServer = AlertServer.getInstance(); - alertServer.start(); - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/configuration/AppConfiguration.java b/escheduler-api/src/main/java/cn/escheduler/api/configuration/AppConfiguration.java deleted file mode 100644 index de0331cc00..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/configuration/AppConfiguration.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or 
more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.configuration; - -import cn.escheduler.api.interceptor.LoginHandlerInterceptor; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.web.servlet.LocaleResolver; -import org.springframework.web.servlet.config.annotation.*; -import org.springframework.web.servlet.i18n.CookieLocaleResolver; -import org.springframework.web.servlet.i18n.LocaleChangeInterceptor; - -import java.util.Locale; - - -/** - * application configuration - */ -@Configuration -public class AppConfiguration implements WebMvcConfigurer { - - public static final String LOGIN_INTERCEPTOR_PATH_PATTERN = "/**/*"; - public static final String LOGIN_PATH_PATTERN = "/login"; - public static final String PATH_PATTERN = "/**"; - public static final String LOCALE_LANGUAGE_COOKIE = "language"; - public static final int COOKIE_MAX_AGE = 3600; - - - @Bean - public LoginHandlerInterceptor loginInterceptor() { - return new LoginHandlerInterceptor(); - } - - - /** - * Cookie - */ - @Bean(name = "localeResolver") - public LocaleResolver localeResolver() { - CookieLocaleResolver localeResolver = new CookieLocaleResolver(); - 
localeResolver.setCookieName(LOCALE_LANGUAGE_COOKIE); - /** set default locale **/ - localeResolver.setDefaultLocale(Locale.US); - /** set cookie max age **/ - localeResolver.setCookieMaxAge(COOKIE_MAX_AGE); - return localeResolver; - } - - @Bean - public LocaleChangeInterceptor localeChangeInterceptor() { - LocaleChangeInterceptor lci = new LocaleChangeInterceptor(); - /** **/ - lci.setParamName("language"); - - return lci; - } - - - @Override - public void addInterceptors(InterceptorRegistry registry) { - //i18n - registry.addInterceptor(localeChangeInterceptor()); - - registry.addInterceptor(loginInterceptor()).addPathPatterns(LOGIN_INTERCEPTOR_PATH_PATTERN).excludePathPatterns(LOGIN_PATH_PATTERN,"/swagger-resources/**", "/webjars/**", "/v2/**", "/doc.html", "*.html", "/ui/**"); - } - - - @Override - public void addResourceHandlers(ResourceHandlerRegistry registry) { - registry.addResourceHandler("/static/**").addResourceLocations("classpath:/static/"); - registry.addResourceHandler("doc.html").addResourceLocations("classpath:/META-INF/resources/"); - registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/"); - registry.addResourceHandler("/ui/**").addResourceLocations("file:ui/"); - } - - @Override - public void addViewControllers(ViewControllerRegistry registry) { - registry.addViewController("/ui/").setViewName("forward:/ui/index.html"); - registry.addViewController("/").setViewName("forward:/ui/index.html"); - } - - @Override - public void addCorsMappings(CorsRegistry registry) { - registry.addMapping(PATH_PATTERN).allowedOrigins("*").allowedMethods("*"); - } - - - /** - * Turn off suffix-based content negotiation - * - * @param configurer - */ - @Override - public void configureContentNegotiation(final ContentNegotiationConfigurer configurer) { - configurer.favorPathExtension(false); - } - - - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/configuration/ServiceModelToSwagger2MapperImpl.java 
b/escheduler-api/src/main/java/cn/escheduler/api/configuration/ServiceModelToSwagger2MapperImpl.java deleted file mode 100644 index e9c001cc95..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/configuration/ServiceModelToSwagger2MapperImpl.java +++ /dev/null @@ -1,509 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.configuration; - -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.Multimap; -import io.swagger.models.*; -import io.swagger.models.parameters.Parameter; -import org.apache.commons.lang3.StringUtils; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.MessageSource; -import org.springframework.context.annotation.Primary; -import org.springframework.context.i18n.LocaleContextHolder; -import org.springframework.stereotype.Component; -import springfox.documentation.service.ApiInfo; -import springfox.documentation.service.ApiListing; -import springfox.documentation.service.Documentation; -import springfox.documentation.service.ResourceListing; -import springfox.documentation.swagger2.mappers.*; - -import java.util.*; - -import static com.google.common.collect.Maps.newTreeMap; - -/** - * application configuration - */ -@Component(value = "ServiceModelToSwagger2Mapper") -@Primary -public class ServiceModelToSwagger2MapperImpl extends ServiceModelToSwagger2Mapper { - - - @Autowired - private ModelMapper modelMapper; - @Autowired - private ParameterMapper parameterMapper; - @Autowired - private SecurityMapper securityMapper; - @Autowired - private LicenseMapper licenseMapper; - @Autowired - private VendorExtensionsMapper vendorExtensionsMapper; - - @Autowired - private MessageSource messageSource; - - @Override - public Swagger mapDocumentation(Documentation from) { - - if (from == null) { - return null; - } - - Swagger swagger = new Swagger(); - - swagger.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions())); - swagger.setSchemes(mapSchemes(from.getSchemes())); - swagger.setPaths(mapApiListings(from.getApiListings())); - swagger.setHost(from.getHost()); - swagger.setDefinitions(modelsFromApiListings( from.getApiListings() ) ); - 
swagger.setSecurityDefinitions(securityMapper.toSecuritySchemeDefinitions(from.getResourceListing())); - ApiInfo info = fromResourceListingInfo(from); - if (info != null) { - swagger.setInfo(mapApiInfo(info)); - } - swagger.setBasePath(from.getBasePath()); - swagger.setTags(tagSetToTagList(from.getTags())); - List list2 = from.getConsumes(); - if (list2 != null) { - swagger.setConsumes(new ArrayList(list2)); - } else { - swagger.setConsumes(null); - } - List list3 = from.getProduces(); - if (list3 != null) { - swagger.setProduces(new ArrayList(list3)); - } else { - swagger.setProduces(null); - } - - return swagger; - } - - - @Override - protected Info mapApiInfo(ApiInfo from) { - - if (from == null) { - return null; - } - - Info info = new Info(); - - info.setLicense(licenseMapper.apiInfoToLicense(from)); - info.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions())); - info.setTermsOfService(from.getTermsOfServiceUrl()); - info.setContact(map(from.getContact())); - info.setDescription(from.getDescription()); - info.setVersion(from.getVersion()); - info.setTitle(from.getTitle()); - - return info; - } - - @Override - protected Contact map(springfox.documentation.service.Contact from) { - - if (from == null) { - return null; - } - - Contact contact = new Contact(); - - contact.setName(from.getName()); - contact.setUrl(from.getUrl()); - contact.setEmail(from.getEmail()); - - return contact; - } - - @Override - protected io.swagger.models.Operation mapOperation(springfox.documentation.service.Operation from) { - - if (from == null) { - return null; - } - - Locale locale = LocaleContextHolder.getLocale(); - - io.swagger.models.Operation operation = new io.swagger.models.Operation(); - - operation.setSecurity(mapAuthorizations(from.getSecurityReferences())); - operation.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions())); - operation.setDescription(messageSource.getMessage(from.getNotes(), null, 
from.getNotes(), locale)); - operation.setOperationId(from.getUniqueId()); - operation.setResponses(mapResponseMessages(from.getResponseMessages())); - operation.setSchemes(stringSetToSchemeList(from.getProtocol())); - Set tagsSet = new HashSet<>(1); - - if(from.getTags() != null && from.getTags().size() > 0){ - - List list = new ArrayList(tagsSet.size()); - - Iterator it = from.getTags().iterator(); - while(it.hasNext()) - { - String tag = it.next(); - list.add(StringUtils.isNotBlank(tag) ? messageSource.getMessage(tag, null, tag, locale) : " "); - } - - operation.setTags(list); - }else { - operation.setTags(null); - } - - operation.setSummary(from.getSummary()); - Set set1 = from.getConsumes(); - if (set1 != null) { - operation.setConsumes(new ArrayList(set1)); - } else { - operation.setConsumes(null); - } - - Set set2 = from.getProduces(); - if (set2 != null) { - operation.setProduces(new ArrayList(set2)); - } else { - operation.setProduces(null); - } - - - operation.setParameters(parameterListToParameterList(from.getParameters())); - if (from.getDeprecated() != null) { - operation.setDeprecated(Boolean.parseBoolean(from.getDeprecated())); - } - - return operation; - } - - @Override - protected Tag mapTag(springfox.documentation.service.Tag from) { - - if (from == null) { - return null; - } - - Locale locale = LocaleContextHolder.getLocale(); - - Tag tag = new Tag(); - - tag.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions())); - tag.setName(messageSource.getMessage(from.getName(), null, from.getName(), locale)); - tag.setDescription(from.getDescription()); - - return tag; - } - - - private ApiInfo fromResourceListingInfo(Documentation documentation) { - - if (documentation == null) { - return null; - } - ResourceListing resourceListing = documentation.getResourceListing(); - if (resourceListing == null) { - return null; - } - ApiInfo info = resourceListing.getInfo(); - if (info == null) { - return null; - } - return info; - } - 
- protected List tagSetToTagList(Set set) { - - if (set == null) { - return null; - } - - List list = new ArrayList(set.size()); - for (springfox.documentation.service.Tag tag : set) { - list.add(mapTag(tag)); - } - - return list; - } - - protected List stringSetToSchemeList(Set set) { - if (set == null) { - return null; - } - - List list = new ArrayList(set.size()); - for (String string : set) { - list.add(Enum.valueOf(Scheme.class, string)); - } - - return list; - } - - protected List parameterListToParameterList(List list) { - if (list == null) { - return null; - } - - List list1 = new ArrayList(list.size()); - - Locale locale = LocaleContextHolder.getLocale(); - - for (springfox.documentation.service.Parameter param : list) { - String description = messageSource.getMessage(param.getDescription(), null, param.getDescription(), locale); - - springfox.documentation.service.Parameter parameter = new springfox.documentation.service.Parameter(param.getName(),description,param.getDefaultValue(),param.isRequired(),param.isAllowMultiple(),param.isAllowEmptyValue(),param.getModelRef(),param.getType(),param.getAllowableValues(),param.getParamType(),param.getParamAccess(),param.isHidden(),param.getPattern(),param.getCollectionFormat(),param.getOrder(),param.getScalarExample(),param.getExamples() ,param.getVendorExtentions()); - list1.add(parameterMapper.mapParameter(parameter)); - } - - return list1; - } - - - Map modelsFromApiListings(Multimap apiListings) { - Map definitions = newTreeMap(); - for (ApiListing each : apiListings.values()) { - definitions.putAll(each.getModels()); - } - return modelMapper.mapModels(definitions); - } - - - - - - -// -// -// -// private static final VendorExtensionsMapper vendorMapper = new VendorExtensionsMapper(); -// -// -// -// public Parameter mapParameter(springfox.documentation.service.Parameter source) { -// Parameter bodyParameter = bodyParameter(source); -// return SerializableParameterFactories.create(source).or(bodyParameter); -// 
} -// -// private Parameter bodyParameter(springfox.documentation.service.Parameter source) { -// BodyParameter parameter = new BodyParameter() -// .description(source.getDescription()) -// .name(source.getName()) -// .schema(fromModelRef(source.getModelRef())); -// parameter.setIn(source.getParamType()); -// parameter.setAccess(source.getParamAccess()); -// parameter.setPattern(source.getPattern()); -// parameter.setRequired(source.isRequired()); -// parameter.getVendorExtensions().putAll(vendorMapper.mapExtensions(source.getVendorExtentions())); -// for (Map.Entry> each : source.getExamples().asMap().entrySet()) { -// Optional example = FluentIterable.from(each.getValue()).first(); -// if (example.isPresent() && example.get().getValue() != null) { -// parameter.addExample(each.getKey(), String.valueOf(example.get().getValue())); -// } -// } -// -// //TODO: swagger-core Body parameter does not have an enum property -// return parameter; -// } -// -// Model fromModelRef(ModelReference modelRef) { -// if (modelRef.isCollection()) { -// if (modelRef.getItemType().equals("byte")) { -// ModelImpl baseModel = new ModelImpl(); -// baseModel.setType("string"); -// baseModel.setFormat("byte"); -// return maybeAddAllowableValuesToParameter(baseModel, modelRef.getAllowableValues()); -// } else if (modelRef.getItemType().equals("file")) { -// ArrayModel files = new ArrayModel(); -// files.items(new FileProperty()); -// return files; -// } -// ModelReference itemModel = modelRef.itemModel().get(); -// return new ArrayModel() -// .items(maybeAddAllowableValues(itemTypeProperty(itemModel), itemModel.getAllowableValues())); -// } -// if (modelRef.isMap()) { -// ModelImpl baseModel = new ModelImpl(); -// ModelReference itemModel = modelRef.itemModel().get(); -// baseModel.additionalProperties( -// maybeAddAllowableValues( -// itemTypeProperty(itemModel), -// itemModel.getAllowableValues())); -// return baseModel; -// } -// if (isBaseType(modelRef.getType())) { -// Property 
property = property(modelRef.getType()); -// ModelImpl baseModel = new ModelImpl(); -// baseModel.setType(property.getType()); -// baseModel.setFormat(property.getFormat()); -// return maybeAddAllowableValuesToParameter(baseModel, modelRef.getAllowableValues()); -// -// } -// return new RefModel(modelRef.getType()); -// } -// -// -// private static class Properties { -// private static final Map> typeFactory -// = ImmutableMap.>builder() -// .put("int", newInstanceOf(IntegerProperty.class)) -// .put("long", newInstanceOf(LongProperty.class)) -// .put("float", newInstanceOf(FloatProperty.class)) -// .put("double", newInstanceOf(DoubleProperty.class)) -// .put("string", newInstanceOf(StringProperty.class)) -// .put("boolean", newInstanceOf(BooleanProperty.class)) -// .put("date", newInstanceOf(DateProperty.class)) -// .put("date-time", newInstanceOf(DateTimeProperty.class)) -// .put("bigdecimal", newInstanceOf(DecimalProperty.class)) -// .put("biginteger", newInstanceOf(BaseIntegerProperty.class)) -// .put("uuid", newInstanceOf(UUIDProperty.class)) -// .put("object", newInstanceOf(ObjectProperty.class)) -// .put("byte", bytePropertyFactory()) -// .put("__file", filePropertyFactory()) -// .build(); -// -// private Properties() { -// throw new UnsupportedOperationException(); -// } -// -// public static Property property(final String typeName) { -// String safeTypeName = nullToEmpty(typeName); -// Function> propertyLookup -// = forMap(typeFactory, voidOrRef(safeTypeName)); -// return propertyLookup.apply(safeTypeName.toLowerCase()).apply(safeTypeName); -// } -// -// public static Property property(final ModelReference modelRef) { -// if (modelRef.isMap()) { -// return new MapProperty(property(modelRef.itemModel().get())); -// } else if (modelRef.isCollection()) { -// if ("byte".equals(modelRef.itemModel().transform(toTypeName()).or(""))) { -// return new ByteArrayProperty(); -// } -// return new ArrayProperty( -// 
maybeAddAllowableValues(itemTypeProperty(modelRef.itemModel().get()), modelRef.getAllowableValues())); -// } -// return property(modelRef.getType()); -// } -// -// private static Function toTypeName() { -// return new Function() { -// @Override -// public String apply(ModelReference input) { -// return input.getType(); -// } -// }; -// } -// -// public static Property itemTypeProperty(ModelReference paramModel) { -// if (paramModel.isCollection()) { -// return new ArrayProperty( -// maybeAddAllowableValues(itemTypeProperty(paramModel.itemModel().get()), paramModel.getAllowableValues())); -// } -// return property(paramModel.getType()); -// } -// -// private static Function newInstanceOf(final Class clazz) { -// return new Function() { -// @Override -// public T apply(String input) { -// try { -// return clazz.newInstance(); -// } catch (Exception e) { -// //This is bad! should never come here -// throw new IllegalStateException(e); -// } -// } -// }; -// } -// -// static Ordering defaultOrdering(Map properties) { -// return Ordering.from(byPosition(properties)).compound(byName()); -// } -// -// private static Function voidOrRef(final String typeName) { -// return new Function() { -// @Override -// public Property apply(String input) { -// if (typeName.equalsIgnoreCase("void")) { -// return null; -// } -// return new RefProperty(typeName); -// } -// }; -// } -// -// private static Function bytePropertyFactory() { -// return new Function() { -// @Override -// public Property apply(String input) { -// final IntegerProperty integerProperty = new IntegerProperty(); -// integerProperty.setFormat("int32"); -// integerProperty.setMaximum(BigDecimal.valueOf(Byte.MAX_VALUE)); -// integerProperty.setMinimum(BigDecimal.valueOf(Byte.MIN_VALUE)); -// return integerProperty; -// } -// }; -// } -// -// private static Function filePropertyFactory() { -// return new Function() { -// @Override -// public Property apply(String input) { -// return new FileProperty(); -// } -// }; -// } 
-// -// private static Comparator byName() { -// return new Comparator() { -// @Override -// public int compare(String first, String second) { -// return first.compareTo(second); -// } -// }; -// } -// -// private static Comparator byPosition(final Map modelProperties) { -// return new Comparator() { -// @Override -// public int compare(String first, String second) { -// ModelProperty p1 = modelProperties.get(first); -// ModelProperty p2 = modelProperties.get(second); -// return Ints.compare(p1.getPosition(), p2.getPosition()); -// } -// }; -// } -// -// static Predicate> voidProperties() { -// return new Predicate>() { -// @Override -// public boolean apply(Map.Entry input) { -// return isVoid(input.getValue().getType()) -// || collectionOfVoid(input.getValue().getType()) -// || arrayTypeOfVoid(input.getValue().getType().getArrayElementType()); -// } -// }; -// } -// -// private static boolean arrayTypeOfVoid(ResolvedType arrayElementType) { -// return arrayElementType != null && isVoid(arrayElementType); -// } -// -// private static boolean collectionOfVoid(ResolvedType type) { -// return isContainerType(type) && isVoid(collectionElementType(type)); -// } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/configuration/SwaggerConfig.java b/escheduler-api/src/main/java/cn/escheduler/api/configuration/SwaggerConfig.java deleted file mode 100644 index fb332c5ac9..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/configuration/SwaggerConfig.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.configuration; - -import com.github.xiaoymin.swaggerbootstrapui.annotations.EnableSwaggerBootstrapUI; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; -import springfox.documentation.builders.ApiInfoBuilder; -import springfox.documentation.builders.PathSelectors; -import springfox.documentation.builders.RequestHandlerSelectors; -import springfox.documentation.service.ApiInfo; -import springfox.documentation.spi.DocumentationType; -import springfox.documentation.spring.web.plugins.Docket; -import springfox.documentation.swagger2.annotations.EnableSwagger2; - -/** - * - * swager2 config class
- * - */ -@Configuration -@EnableSwagger2 -@EnableSwaggerBootstrapUI -public class SwaggerConfig implements WebMvcConfigurer { - - @Bean - public Docket createRestApi() { - return new Docket(DocumentationType.SWAGGER_2).apiInfo(apiInfo()).select() - .apis(RequestHandlerSelectors.basePackage("cn.escheduler.api.controller")).paths(PathSelectors.any()) - .build(); - } - - private ApiInfo apiInfo() { - return new ApiInfoBuilder().title("Easy Scheduler Api Docs").description("Easy Scheduler Api Docs") - .build(); - } - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/AccessTokenController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/AccessTokenController.java deleted file mode 100644 index 2e1876eb20..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/AccessTokenController.java +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.AccessTokenService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; - - -/** - * access token controller - */ -@Api(tags = "ACCESS_TOKEN_TAG", position = 1) -@RestController -@RequestMapping("/access-token") -public class AccessTokenController extends BaseController{ - - - private static final Logger logger = LoggerFactory.getLogger(AccessTokenController.class); - - - @Autowired - private AccessTokenService accessTokenService; - - /** - * create token - * @param loginUser - * @return - */ - @ApiIgnore - @PostMapping(value = "/create") - @ResponseStatus(HttpStatus.CREATED) - public Result createToken(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "expireTime") String expireTime, - @RequestParam(value = "token") String token){ - logger.info("login user {}, create token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(), - userId,expireTime,token); - - try { - Map result = accessTokenService.createToken(userId, expireTime, token); - return returnDataList(result); - }catch (Exception e){ - logger.error(CREATE_ACCESS_TOKEN_ERROR.getMsg(),e); - return 
error(CREATE_ACCESS_TOKEN_ERROR.getCode(), CREATE_ACCESS_TOKEN_ERROR.getMsg()); - } - } - - /** - * create token - * @param loginUser - * @return - */ - @ApiIgnore - @PostMapping(value = "/generate") - @ResponseStatus(HttpStatus.CREATED) - public Result generateToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "expireTime") String expireTime){ - logger.info("login user {}, generate token , userId : {} , token expire time : {}",loginUser,userId,expireTime); - try { - Map result = accessTokenService.generateToken(userId, expireTime); - return returnDataList(result); - }catch (Exception e){ - logger.error(GENERATE_TOKEN_ERROR.getMsg(),e); - return error(GENERATE_TOKEN_ERROR.getCode(), GENERATE_TOKEN_ERROR.getMsg()); - } - } - - /** - * query access token list paging - * - * @param loginUser - * @param pageNo - * @param searchVal - * @param pageSize - * @return - */ - @ApiOperation(value = "queryAccessTokenList", notes= "QUERY_ACCESS_TOKEN_LIST_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") - }) - @GetMapping(value="/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryAccessTokenList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize){ - logger.info("login user {}, list access token paging, pageNo: {}, searchVal: {}, pageSize: {}", - loginUser.getUserName(),pageNo,searchVal,pageSize); - try{ - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } 
- searchVal = ParameterUtils.handleEscapes(searchVal); - result = accessTokenService.queryAccessTokenList(loginUser, searchVal, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getMsg(),e); - return error(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getCode(),QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getMsg()); - } - } - - /** - * delete access token by id - * @param loginUser - * @param id - * @return - */ - @ApiIgnore - @PostMapping(value = "/delete") - @ResponseStatus(HttpStatus.OK) - public Result delAccessTokenById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id) { - logger.info("login user {}, delete access token, id: {},", loginUser.getUserName(), id); - try { - Map result = accessTokenService.delAccessTokenById(loginUser, id); - return returnDataList(result); - }catch (Exception e){ - logger.error(DELETE_USER_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_USER_BY_ID_ERROR.getCode(), Status.DELETE_USER_BY_ID_ERROR.getMsg()); - } - } - - - /** - * update token - * @param loginUser - * @return - */ - @ApiIgnore - @PostMapping(value = "/update") - @ResponseStatus(HttpStatus.CREATED) - public Result updateToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "expireTime") String expireTime, - @RequestParam(value = "token") String token){ - logger.info("login user {}, update token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(), - userId,expireTime,token); - - try { - Map result = accessTokenService.updateToken(id,userId, expireTime, token); - return returnDataList(result); - }catch (Exception e){ - logger.error(CREATE_ACCESS_TOKEN_ERROR.getMsg(),e); - return error(CREATE_ACCESS_TOKEN_ERROR.getCode(), CREATE_ACCESS_TOKEN_ERROR.getMsg()); - } - } - -} diff --git 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.service.AlertGroupService;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.AlertType;
import cn.escheduler.common.utils.ParameterUtils;
import cn.escheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;

import java.util.HashMap;
import java.util.Map;

import static cn.escheduler.api.enums.Status.*;

/**
 * REST endpoints for managing alert groups: create, list, page, update,
 * delete, name verification and user-grant.
 */
@Api(tags = "ALERT_GROUP_TAG", position = 1)
@RestController
@RequestMapping("alert-group")
public class AlertGroupController extends BaseController {

    private static final Logger logger = LoggerFactory.getLogger(AlertGroupController.class);

    @Autowired
    private AlertGroupService alertGroupService;

    /**
     * Create an alert group.
     *
     * @param loginUser session user issuing the request
     * @param groupName group name
     * @param groupType group type (e.g. email / wechat)
     * @param desc      optional description
     * @return creation result
     */
    @ApiOperation(value = "createAlertgroup", notes = "CREATE_ALERT_GROUP_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"),
            @ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType = "AlertType"),
            @ApiImplicitParam(name = "desc", value = "DESC", dataType = "String")
    })
    @PostMapping(value = "/create")
    @ResponseStatus(HttpStatus.CREATED)
    public Result createAlertgroup(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                   @RequestParam(value = "groupName") String groupName,
                                   @RequestParam(value = "groupType") AlertType groupType,
                                   @RequestParam(value = "desc", required = false) String desc) {
        logger.info("loginUser user {}, create alertgroup, groupName: {}, groupType: {}, desc: {}",
                loginUser.getUserName(), groupName, groupType, desc);
        try {
            Map<String, Object> result = alertGroupService.createAlertgroup(loginUser, groupName, groupType, desc);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(CREATE_ALERT_GROUP_ERROR.getMsg(), e);
            return error(CREATE_ALERT_GROUP_ERROR.getCode(), CREATE_ALERT_GROUP_ERROR.getMsg());
        }
    }

    /**
     * List all alert groups.
     *
     * @param loginUser session user issuing the request
     * @return all alert groups
     */
    @ApiOperation(value = "list", notes = "QUERY_ALERT_GROUP_LIST_NOTES")
    @GetMapping(value = "/list")
    @ResponseStatus(HttpStatus.OK)
    public Result list(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
        logger.info("login user {}, query all alertGroup",
                loginUser.getUserName());
        try {
            // Program to the interface: declare the local as Map rather than HashMap.
            Map<String, Object> result = alertGroupService.queryAlertgroup();
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(QUERY_ALL_ALERTGROUP_ERROR.getMsg(), e);
            return error(QUERY_ALL_ALERTGROUP_ERROR.getCode(), QUERY_ALL_ALERTGROUP_ERROR.getMsg());
        }
    }

    /**
     * Query alert groups page by page.
     *
     * @param loginUser session user issuing the request
     * @param pageNo    page number (1-based)
     * @param searchVal optional search keyword
     * @param pageSize  page size
     * @return one page of alert groups
     */
    // NOTE(review): the @ApiOperation value/notes below look copy-pasted from the task-instance
    // controller, so swagger labels this endpoint as a task-instance query. Verify the message
    // keys (they are i18n lookups) before renaming them.
    @ApiOperation(value = "queryTaskListPaging", notes = "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"),
            @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"),
            @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20")
    })
    @GetMapping(value = "/list-paging")
    @ResponseStatus(HttpStatus.OK)
    public Result listPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                             @RequestParam("pageNo") Integer pageNo,
                             @RequestParam(value = "searchVal", required = false) String searchVal,
                             @RequestParam("pageSize") Integer pageSize) {
        logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}",
                loginUser.getUserName(), pageNo, searchVal, pageSize);
        try {
            Map<String, Object> result = checkPageParams(pageNo, pageSize);
            if (result.get(Constants.STATUS) != SUCCESS) {
                return returnDataListPaging(result);
            }

            searchVal = ParameterUtils.handleEscapes(searchVal);
            result = alertGroupService.listPaging(loginUser, searchVal, pageNo, pageSize);
            return returnDataListPaging(result);
        } catch (Exception e) {
            logger.error(LIST_PAGING_ALERT_GROUP_ERROR.getMsg(), e);
            return error(LIST_PAGING_ALERT_GROUP_ERROR.getCode(), LIST_PAGING_ALERT_GROUP_ERROR.getMsg());
        }
    }

    /**
     * Update an alert group.
     *
     * @param loginUser session user issuing the request
     * @param id        alert group id
     * @param groupName new group name
     * @param groupType new group type
     * @param desc      optional description
     * @return update result
     */
    @ApiOperation(value = "updateAlertgroup", notes = "UPDATE_ALERT_GROUP_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int", example = "100"),
            @ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"),
            @ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType = "AlertType"),
            @ApiImplicitParam(name = "desc", value = "DESC", dataType = "String")
    })
    @PostMapping(value = "/update")
    @ResponseStatus(HttpStatus.OK)
    public Result updateAlertgroup(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                   @RequestParam(value = "id") int id,
                                   @RequestParam(value = "groupName") String groupName,
                                   @RequestParam(value = "groupType") AlertType groupType,
                                   @RequestParam(value = "desc", required = false) String desc) {
        logger.info("login user {}, updateProcessInstance alertgroup, groupName: {}, groupType: {}, desc: {}",
                loginUser.getUserName(), groupName, groupType, desc);
        try {
            Map<String, Object> result = alertGroupService.updateAlertgroup(loginUser, id, groupName, groupType, desc);
            return returnDataList(result);

        } catch (Exception e) {
            logger.error(UPDATE_ALERT_GROUP_ERROR.getMsg(), e);
            return error(UPDATE_ALERT_GROUP_ERROR.getCode(), UPDATE_ALERT_GROUP_ERROR.getMsg());
        }
    }

    /**
     * Delete an alert group by id.
     *
     * @param loginUser session user issuing the request
     * @param id        alert group id
     * @return delete result
     */
    @ApiOperation(value = "delAlertgroupById", notes = "DELETE_ALERT_GROUP_BY_ID_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int", example = "100")
    })
    @PostMapping(value = "/delete")
    @ResponseStatus(HttpStatus.OK)
    public Result delAlertgroupById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                    @RequestParam(value = "id") int id) {
        logger.info("login user {}, delete AlertGroup, id: {},", loginUser.getUserName(), id);
        try {
            Map<String, Object> result = alertGroupService.delAlertgroupById(loginUser, id);
            return returnDataList(result);

        } catch (Exception e) {
            logger.error(DELETE_ALERT_GROUP_ERROR.getMsg(), e);
            return error(DELETE_ALERT_GROUP_ERROR.getCode(), DELETE_ALERT_GROUP_ERROR.getMsg());
        }
    }

    /**
     * Check whether an alert group name already exists.
     *
     * @param loginUser session user issuing the request
     * @param groupName group name to verify
     * @return verification result (delegated directly to the service)
     */
    @ApiOperation(value = "verifyGroupName", notes = "VERIFY_ALERT_GROUP_NAME_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"),
    })
    @GetMapping(value = "/verify-group-name")
    @ResponseStatus(HttpStatus.OK)
    public Result verifyGroupName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                  @RequestParam(value = "groupName") String groupName
    ) {
        // Fixed typo in the log message ("verfiy" -> "verify").
        logger.info("login user {}, verify group name: {}",
                loginUser.getUserName(), groupName);

        return alertGroupService.verifyGroupName(loginUser, groupName);
    }

    /**
     * Grant users to an alert group.
     *
     * @param loginUser    session user issuing the request
     * @param alertgroupId alert group id
     * @param userIds      comma-separated user ids
     * @return grant result
     */
    @ApiOperation(value = "grantUser", notes = "GRANT_ALERT_GROUP_NOTES")
    @ApiImplicitParams({
            // Fixed swagger doc: the actual request parameter is "alertgroupId", not "id".
            @ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID", required = true, dataType = "Int", example = "100"),
            @ApiImplicitParam(name = "userIds", value = "USER_IDS", required = true, dataType = "String")
    })
    @PostMapping(value = "/grant-user")
    @ResponseStatus(HttpStatus.OK)
    public Result grantUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                            @RequestParam(value = "alertgroupId") int alertgroupId,
                            @RequestParam(value = "userIds") String userIds) {
        logger.info("login user {}, grant user, alertGroupId: {},userIds : {}", loginUser.getUserName(), alertgroupId, userIds);
        try {
            Map<String, Object> result = alertGroupService.grantUser(loginUser, alertgroupId, userIds);
            return returnDataList(result);

        } catch (Exception e) {
            logger.error(ALERT_GROUP_GRANT_USER_ERROR.getMsg(), e);
            return error(ALERT_GROUP_GRANT_USER_ERROR.getCode(), ALERT_GROUP_GRANT_USER_ERROR.getMsg());
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.api.utils.Result;
import cn.escheduler.dao.entity.Resource;
import org.apache.commons.lang3.StringUtils;

import javax.servlet.http.HttpServletRequest;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;

import static cn.escheduler.common.Constants.*;

/**
 * Shared base for all REST controllers: paging-parameter validation,
 * client-IP extraction, and helpers for converting service-layer
 * {@code Map} results into {@link Result} responses.
 */
public class BaseController {

    /**
     * Validate paging parameters.
     *
     * @param pageNo   page number, must be positive
     * @param pageSize page size, must be positive
     * @return map carrying {@code Constants.STATUS} / {@code Constants.MSG}
     */
    public Map<String, Object> checkPageParams(int pageNo, int pageSize) {
        Status status = Status.SUCCESS;
        String msg = Status.SUCCESS.getMsg();
        if (pageNo <= 0) {
            status = Status.REQUEST_PARAMS_NOT_VALID_ERROR;
            msg = MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), Constants.PAGE_NUMBER);
        } else if (pageSize <= 0) {
            status = Status.REQUEST_PARAMS_NOT_VALID_ERROR;
            msg = MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), Constants.PAGE_SIZE);
        }
        Map<String, Object> result = new HashMap<>(2);
        result.put(Constants.STATUS, status);
        result.put(Constants.MSG, msg);
        return result;
    }

    /**
     * Resolve the originating client IP of an HTTP request, honouring the
     * X-Forwarded-For and X-Real-IP headers set by proxies before falling
     * back to the socket address.
     *
     * @param request current HTTP request
     * @return client IP address
     */
    public static String getClientIpAddress(HttpServletRequest request) {
        String forwarded = request.getHeader(HTTP_X_FORWARDED_FOR);
        if (StringUtils.isNotEmpty(forwarded) && !StringUtils.equalsIgnoreCase(HTTP_HEADER_UNKNOWN, forwarded)) {
            // X-Forwarded-For may carry a chain "client, proxy1, proxy2" — the client is the first entry.
            int comma = forwarded.indexOf(COMMA);
            return comma == -1 ? forwarded : forwarded.substring(0, comma);
        }

        String realIp = request.getHeader(HTTP_X_REAL_IP);
        if (StringUtils.isNotEmpty(realIp) && !StringUtils.equalsIgnoreCase(HTTP_HEADER_UNKNOWN, realIp)) {
            return realIp;
        }

        return request.getRemoteAddr();
    }

    /**
     * Convert a service-layer result map into a non-paged {@link Result}.
     *
     * @param result map carrying STATUS, MSG and DATA_LIST entries
     * @return success Result with the data list, or an error Result
     */
    public Result returnDataList(Map<String, Object> result) {
        Status status = (Status) result.get(Constants.STATUS);
        if (status == Status.SUCCESS) {
            return success(Status.SUCCESS.getMsg(), result.get(Constants.DATA_LIST));
        }
        return error(status.getCode(), (String) result.get(Constants.MSG));
    }

    /**
     * Convert a service-layer result map into a paged {@link Result}.
     *
     * @param result map whose DATA_LIST entry is a {@link PageInfo} on success
     * @return paged success Result, or an error Result
     */
    public Result returnDataListPaging(Map<String, Object> result) {
        Status status = (Status) result.get(Constants.STATUS);
        if (status == Status.SUCCESS) {
            result.put(Constants.MSG, Status.SUCCESS.getMsg());
            PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST);
            return success(pageInfo.getLists(), pageInfo.getCurrentPage(), pageInfo.getTotalCount(),
                    pageInfo.getTotalPage());
        }
        return error(status.getCode(), (String) result.get(Constants.MSG));
    }

    /**
     * Success with the default message and no payload.
     *
     * @return success Result
     */
    public Result success() {
        return getResult(Status.SUCCESS.getMsg(), null) == null ? null : buildSuccess(Status.SUCCESS.getMsg());
    }

    /**
     * Success with a custom message and no payload.
     *
     * @param msg message to return
     * @return success Result
     */
    public Result success(String msg) {
        return buildSuccess(msg);
    }

    /**
     * Success with a custom message and a data payload (no paging).
     *
     * @param msg  message to return
     * @param list data payload
     * @return success Result
     */
    public Result success(String msg, Object list) {
        return getResult(msg, list);
    }

    /**
     * Success with the default message and a data payload (no paging).
     *
     * @param list data payload
     * @return success Result
     */
    public Result success(Object list) {
        return getResult(Status.SUCCESS.getMsg(), list);
    }

    /**
     * Success returning a Map payload, e.g. a single key/value
     * such as {@code "/user/add"} returning the user name.
     *
     * @param msg    message to return
     * @param object map payload
     * @return success Result
     */
    public Result success(String msg, Map<String, Object> object) {
        return getResult(msg, object);
    }

    /**
     * Success with a paged payload.
     *
     * @param totalList   items of the current page
     * @param currentPage current page number
     * @param total       total item count
     * @param totalPage   total page count
     * @return paged success Result
     */
    public Result success(Object totalList, Integer currentPage,
                          Integer total, Integer totalPage) {
        Map<String, Object> map = new HashMap<>(4);
        map.put(Constants.TOTAL_LIST, totalList);
        map.put(Constants.CURRENT_PAGE, currentPage);
        map.put(Constants.TOTAL_PAGE, totalPage);
        map.put(Constants.TOTAL, total);

        Result result = buildSuccess(Status.SUCCESS.getMsg());
        result.setData(map);
        return result;
    }

    /**
     * Build an error Result.
     *
     * @param code error code
     * @param msg  error message
     * @return error Result
     */
    public Result error(Integer code, String msg) {
        Result result = new Result();
        result.setCode(code);
        result.setMsg(msg);
        return result;
    }

    /**
     * Put a status (with optional message parameters) into a result map.
     *
     * @param result       target map
     * @param status       status to record
     * @param statusParams optional MessageFormat arguments for the status message
     */
    protected void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
        result.put(Constants.STATUS, status);
        if (statusParams != null && statusParams.length > 0) {
            result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
        } else {
            result.put(Constants.MSG, status.getMsg());
        }
    }

    /**
     * Put a status (with optional message parameters) into a {@link Result}.
     *
     * @param result       target Result
     * @param status       status to record
     * @param statusParams optional MessageFormat arguments for the status message
     */
    protected void putMsg(Result result, Status status, Object... statusParams) {
        result.setCode(status.getCode());
        if (statusParams != null && statusParams.length > 0) {
            result.setMsg(MessageFormat.format(status.getMsg(), statusParams));
        } else {
            result.setMsg(status.getMsg());
        }
    }

    // Build a success Result with the given message and no data.
    private Result buildSuccess(String msg) {
        Result result = new Result();
        result.setCode(Status.SUCCESS.getCode());
        result.setMsg(msg);
        return result;
    }

    /**
     * Build a success Result with the given message and data payload.
     *
     * @param msg  message to return
     * @param list data payload
     * @return success Result
     */
    private Result getResult(String msg, Object list) {
        Result result = buildSuccess(msg);
        result.setData(list);
        return result;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;


import cn.escheduler.api.service.DataAnalysisService;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.Result;
import cn.escheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;

import java.util.Map;

import static cn.escheduler.api.enums.Status.*;

/**
 * REST endpoints for statistical dashboards: task-state, process-instance-state,
 * process-definition, command-state and queue counts, optionally scoped to a project.
 */
@Api(tags = "DATA_ANALYSIS_TAG", position = 1)
@RestController
@RequestMapping("projects/analysis")
public class DataAnalysisController extends BaseController {

    private static final Logger logger = LoggerFactory.getLogger(DataAnalysisController.class);


    @Autowired
    DataAnalysisService dataAnalysisService;


    /**
     * Count task instances grouped by state.
     *
     * @param loginUser session user issuing the request
     * @param startDate optional start date filter
     * @param endDate   optional end date filter
     * @param projectId project scope; 0 means all projects visible to the user
     * @return task state counts
     */
    @ApiOperation(value = "countTaskState", notes = "COUNT_TASK_STATE_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"),
            @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType = "String"),
            @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
    })
    @GetMapping(value = "/task-state-count")
    @ResponseStatus(HttpStatus.OK)
    public Result countTaskState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                 @RequestParam(value = "startDate", required = false) String startDate,
                                 @RequestParam(value = "endDate", required = false) String endDate,
                                 @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
        try {
            logger.info("count task state, user:{}, start date: {}, end date:{}, project id {}",
                    loginUser.getUserName(), startDate, endDate, projectId);
            Map<String, Object> result = dataAnalysisService.countTaskStateByProject(loginUser, projectId, startDate, endDate);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(TASK_INSTANCE_STATE_COUNT_ERROR.getMsg(), e);
            return error(TASK_INSTANCE_STATE_COUNT_ERROR.getCode(), TASK_INSTANCE_STATE_COUNT_ERROR.getMsg());
        }
    }

    /**
     * Count process instances grouped by state.
     *
     * @param loginUser session user issuing the request
     * @param startDate optional start date filter
     * @param endDate   optional end date filter
     * @param projectId project scope; 0 means all projects visible to the user
     * @return process instance state counts
     */
    @ApiOperation(value = "countProcessInstanceState", notes = "COUNT_PROCESS_INSTANCE_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"),
            @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType = "String"),
            @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
    })
    @GetMapping(value = "/process-state-count")
    @ResponseStatus(HttpStatus.OK)
    public Result countProcessInstanceState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                            @RequestParam(value = "startDate", required = false) String startDate,
                                            @RequestParam(value = "endDate", required = false) String endDate,
                                            @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
        try {
            // Fixed log format: a "{}" placeholder was missing for projectId (4 args, 3 placeholders).
            logger.info("count process instance state, user:{}, start date: {}, end date:{}, project id {}",
                    loginUser.getUserName(), startDate, endDate, projectId);
            Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(loginUser, projectId, startDate, endDate);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg(), e);
            return error(COUNT_PROCESS_INSTANCE_STATE_ERROR.getCode(), COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg());
        }
    }

    /**
     * Count process definitions created by the current user.
     *
     * @param loginUser session user issuing the request
     * @param projectId project scope; 0 means all projects visible to the user
     * @return process definition counts
     */
    @ApiOperation(value = "countDefinitionByUser", notes = "COUNT_PROCESS_DEFINITION_BY_USER_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
    })
    @GetMapping(value = "/define-user-count")
    @ResponseStatus(HttpStatus.OK)
    public Result countDefinitionByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                        @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
        try {
            // Fixed log format: a "{}" placeholder was missing for projectId (2 args, 1 placeholder).
            logger.info("count process definition , user:{}, project id {}",
                    loginUser.getUserName(), projectId);
            Map<String, Object> result = dataAnalysisService.countDefinitionByUser(loginUser, projectId);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(COUNT_PROCESS_DEFINITION_USER_ERROR.getMsg(), e);
            return error(COUNT_PROCESS_DEFINITION_USER_ERROR.getCode(), COUNT_PROCESS_DEFINITION_USER_ERROR.getMsg());
        }
    }


    /**
     * Count commands grouped by state.
     *
     * @param loginUser session user issuing the request
     * @param startDate optional start date filter
     * @param endDate   optional end date filter
     * @param projectId project scope; 0 means all projects visible to the user
     * @return command state counts
     */
    @ApiOperation(value = "countCommandState", notes = "COUNT_COMMAND_STATE_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"),
            @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType = "String"),
            @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
    })
    @GetMapping(value = "/command-state-count")
    @ResponseStatus(HttpStatus.OK)
    public Result countCommandState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                    @RequestParam(value = "startDate", required = false) String startDate,
                                    @RequestParam(value = "endDate", required = false) String endDate,
                                    @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
        try {
            logger.info("count command state, user:{}, start date: {}, end date:{}, project id {}",
                    loginUser.getUserName(), startDate, endDate, projectId);
            Map<String, Object> result = dataAnalysisService.countCommandState(loginUser, projectId, startDate, endDate);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(COMMAND_STATE_COUNT_ERROR.getMsg(), e);
            return error(COMMAND_STATE_COUNT_ERROR.getCode(), COMMAND_STATE_COUNT_ERROR.getMsg());
        }
    }

    /**
     * Count queued commands.
     *
     * @param loginUser session user issuing the request
     * @param projectId project scope; 0 means all projects visible to the user
     * @return queue counts
     */
    @ApiOperation(value = "countQueueState", notes = "COUNT_QUEUE_STATE_NOTES")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"),
            @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType = "String"),
            @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
    })
    @GetMapping(value = "/queue-count")
    @ResponseStatus(HttpStatus.OK)
    public Result countQueueState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                  @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
        try {
            // Fixed log: message was copy-pasted from countCommandState with 4 placeholders
            // but only 2 arguments; now says "queue state" and matches its arguments.
            logger.info("count queue state, user:{}, project id {}",
                    loginUser.getUserName(), projectId);
            Map<String, Object> result = dataAnalysisService.countQueueState(loginUser, projectId);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(QUEUE_COUNT_ERROR.getMsg(), e);
            return error(QUEUE_COUNT_ERROR.getCode(), QUEUE_COUNT_ERROR.getMsg());
        }
    }


}
b/escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java deleted file mode 100644 index 57cb214d76..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java +++ /dev/null @@ -1,460 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.DataSourceService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.DbType; -import cn.escheduler.common.utils.CommonUtils; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; - - -/** - * data source controller - */ -@Api(tags = "DATA_SOURCE_TAG", position = 3) -@RestController -@RequestMapping("datasources") -public class DataSourceController extends BaseController { - - private static final Logger logger = LoggerFactory.getLogger(DataSourceController.class); - - @Autowired - private DataSourceService dataSourceService; - - /** - * create data source - * @param loginUser - * @param name - * @param note - * @param type - * @param host - * @param port - * @param database - * @param principal - * @param userName - * @param password - * @param other - * @return - */ - @ApiOperation(value = "createDataSource", notes= "CREATE_DATA_SOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"), - @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType"), - @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType 
="String"), - @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"), - @ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"), - @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String") - }) - @PostMapping(value = "/create") - @ResponseStatus(HttpStatus.CREATED) - public Result createDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("name") String name, - @RequestParam(value = "note", required = false) String note, - @RequestParam(value = "type") DbType type, - @RequestParam(value = "host") String host, - @RequestParam(value = "port") String port, - @RequestParam(value = "database") String database, - @RequestParam(value = "principal") String principal, - @RequestParam(value = "userName") String userName, - @RequestParam(value = "password") String password, - @RequestParam(value = "other") String other) { - logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {},port: {},database : {},principal: {},userName : {} other: {}", - loginUser.getUserName(), name, note, type, host,port,database,principal,userName,other); - try { - String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal,userName, password, other); - Map result = dataSourceService.createDataSource(loginUser, name, note, type, parameter); - return returnDataList(result); - - } catch (Exception e) { - logger.error(CREATE_DATASOURCE_ERROR.getMsg(),e); - return error(Status.CREATE_DATASOURCE_ERROR.getCode(), Status.CREATE_DATASOURCE_ERROR.getMsg()); - } - } - - - /** - * updateProcessInstance data source - * - * @param loginUser - * @param name - * @param note - * @param type - * @param other - * @return 
- */ - @ApiOperation(value = "updateDataSource", notes= "UPDATE_DATA_SOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"), - @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType"), - @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"), - @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"), - @ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"), - @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String") - }) - @PostMapping(value = "/update") - @ResponseStatus(HttpStatus.OK) - public Result updateDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("id") int id, - @RequestParam("name") String name, - @RequestParam(value = "note", required = false) String note, - @RequestParam(value = "type") DbType type, - @RequestParam(value = "host") String host, - @RequestParam(value = "port") String port, - @RequestParam(value = "database") String database, - @RequestParam(value = "principal") String principal, - @RequestParam(value = "userName") String userName, - @RequestParam(value = "password") String password, - @RequestParam(value = "other") String other) { - logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {}, other: {}", - loginUser.getUserName(), name, note, type, other); - try { - String parameter = dataSourceService.buildParameter(name, note, 
type, host, port, database,principal, userName, password, other); - Map dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter); - return returnDataList(dataSource); - } catch (Exception e) { - logger.error(UPDATE_DATASOURCE_ERROR.getMsg(),e); - return error(UPDATE_DATASOURCE_ERROR.getCode(), UPDATE_DATASOURCE_ERROR.getMsg()); - } - - - } - - /** - * query data source - * - * @param loginUser - * @param id - * @return - */ - @ApiOperation(value = "queryDataSource", notes= "QUERY_DATA_SOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100") - - }) - @PostMapping(value = "/update-ui") - @ResponseStatus(HttpStatus.OK) - public Result queryDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("id") int id) { - logger.info("login user {}, query datasource: {}", - loginUser.getUserName(), id); - try { - Map result = dataSourceService.queryDataSource(id); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e); - return error(Status.QUERY_DATASOURCE_ERROR.getCode(), Status.QUERY_DATASOURCE_ERROR.getMsg()); - } - - - } - - /** - * query datasouce by type - * - * @param loginUser - * @return - */ - @ApiOperation(value = "queryDataSourceList", notes= "QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType") - }) - @GetMapping(value = "/list") - @ResponseStatus(HttpStatus.OK) - public Result queryDataSourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("type") DbType type) { - try { - Map result = dataSourceService.queryDataSourceList(loginUser, type.ordinal()); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e); - return 
error(Status.QUERY_DATASOURCE_ERROR.getCode(), Status.QUERY_DATASOURCE_ERROR.getMsg()); - } - } - - /** - * query datasource with paging - * - * @param loginUser - * @param searchVal - * @param pageNo - * @param pageSize - * @return - */ - @ApiOperation(value = "queryDataSourceListPaging", notes= "QUERY_DATA_SOURCE_LIST_PAGING_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") - }) - @GetMapping(value = "/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryDataSourceListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageNo") Integer pageNo, - @RequestParam("pageSize") Integer pageSize) { - try { - Map result = checkPageParams(pageNo, pageSize); - if (result.get(Constants.STATUS) != Status.SUCCESS) { - return returnDataListPaging(result); - } - searchVal = ParameterUtils.handleEscapes(searchVal); - result = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize); - return returnDataListPaging(result); - } catch (Exception e) { - logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e); - return error(QUERY_DATASOURCE_ERROR.getCode(), QUERY_DATASOURCE_ERROR.getMsg()); - } - } - - /** - * connec datasource - * - * @param loginUser - * @param name - * @param note - * @param type - * @param other - * @return - */ - @ApiOperation(value = "connectDataSource", notes= "CONNECT_DATA_SOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"), - @ApiImplicitParam(name = "type", value = "DB_TYPE", required = 
true,dataType ="DbType"), - @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"), - @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"), - @ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"), - @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String") - }) - @PostMapping(value = "/connect") - @ResponseStatus(HttpStatus.OK) - public Result connectDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("name") String name, - @RequestParam(value = "note", required = false) String note, - @RequestParam(value = "type") DbType type, - @RequestParam(value = "host") String host, - @RequestParam(value = "port") String port, - @RequestParam(value = "database") String database, - @RequestParam(value = "principal") String principal, - @RequestParam(value = "userName") String userName, - @RequestParam(value = "password") String password, - @RequestParam(value = "other") String other) { - logger.info("login user {}, connect datasource: {} failure, note: {}, type: {}, other: {}", - loginUser.getUserName(), name, note, type, other); - try { - String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal,userName, password, other); - Boolean isConnection = dataSourceService.checkConnection(type, parameter); - Result result = new Result(); - - if (isConnection) { - putMsg(result, SUCCESS); - } else { - putMsg(result, CONNECT_DATASOURCE_FAILURE); - } - return result; - } catch (Exception e) { - logger.error(CONNECT_DATASOURCE_FAILURE.getMsg(),e); - return error(CONNECT_DATASOURCE_FAILURE.getCode(), CONNECT_DATASOURCE_FAILURE.getMsg()); - } - } - - /** - * connection 
test - * - * @param loginUser - * @return - */ - @ApiOperation(value = "connectionTest", notes= "CONNECT_DATA_SOURCE_TEST_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/connect-by-id") - @ResponseStatus(HttpStatus.OK) - public Result connectionTest(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("id") int id) { - logger.info("connection test, login user:{}, id:{}", loginUser.getUserName(), id); - - try { - Boolean isConnection = dataSourceService.connectionTest(loginUser, id); - Result result = new Result(); - - if (isConnection) { - putMsg(result, SUCCESS); - } else { - putMsg(result, CONNECTION_TEST_FAILURE); - } - return result; - } catch (Exception e) { - logger.error(CONNECTION_TEST_FAILURE.getMsg(),e); - return error(CONNECTION_TEST_FAILURE.getCode(), CONNECTION_TEST_FAILURE.getMsg()); - } - - } - - /** - * delete datasource by id - * - * @param loginUser - * @param id datasource id - * @return - */ - @ApiOperation(value = "delete", notes= "DELETE_DATA_SOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/delete") - @ResponseStatus(HttpStatus.OK) - public Result delete(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("id") int id) { - try { - logger.info("delete datasource,login user:{}, id:{}", loginUser.getUserName(), id); - return dataSourceService.delete(loginUser, id); - } catch (Exception e) { - logger.error(DELETE_DATA_SOURCE_FAILURE.getMsg(),e); - return error(DELETE_DATA_SOURCE_FAILURE.getCode(), DELETE_DATA_SOURCE_FAILURE.getMsg()); - } - } - - /** - * verify datasource name - * - * @param loginUser - * @param name - * @return - */ - @ApiOperation(value = "verifyDataSourceName", notes= "VERIFY_DATA_SOURCE_NOTES") - 
@ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String") - }) - @GetMapping(value = "/verify-name") - @ResponseStatus(HttpStatus.OK) - public Result verifyDataSourceName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "name") String name - ) { - logger.info("login user {}, verfiy datasource name: {}", - loginUser.getUserName(), name); - - try { - return dataSourceService.verifyDataSourceName(loginUser, name); - } catch (Exception e) { - logger.error(VERFIY_DATASOURCE_NAME_FAILURE.getMsg(),e); - return error(VERFIY_DATASOURCE_NAME_FAILURE.getCode(), VERFIY_DATASOURCE_NAME_FAILURE.getMsg()); - } - } - - - - /** - * unauthorized datasource - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "unauthDatasource", notes= "UNAUTHORIZED_DATA_SOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/unauth-datasource") - @ResponseStatus(HttpStatus.OK) - public Result unauthDatasource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("userId") Integer userId) { - try { - logger.info("unauthorized datasource, login user:{}, unauthorized userId:{}", - loginUser.getUserName(), userId); - Map result = dataSourceService.unauthDatasource(loginUser, userId); - return returnDataList(result); - } catch (Exception e) { - logger.error(UNAUTHORIZED_DATASOURCE.getMsg(),e); - return error(UNAUTHORIZED_DATASOURCE.getCode(), UNAUTHORIZED_DATASOURCE.getMsg()); - } - } - - - /** - * authorized datasource - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "authedDatasource", notes= "AUTHORIZED_DATA_SOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") - }) - 
@GetMapping(value = "/authed-datasource") - @ResponseStatus(HttpStatus.OK) - public Result authedDatasource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("userId") Integer userId) { - try { - logger.info("authorized data source, login user:{}, authorized useId:{}", - loginUser.getUserName(), userId); - Map result = dataSourceService.authedDatasource(loginUser, userId); - return returnDataList(result); - } catch (Exception e) { - logger.error(AUTHORIZED_DATA_SOURCE.getMsg(),e); - return error(AUTHORIZED_DATA_SOURCE.getCode(), AUTHORIZED_DATA_SOURCE.getMsg()); - } - } - - /** - * get user info - * - * @param loginUser - * @return - */ - @ApiOperation(value = "getKerberosStartupState", notes= "GET_USER_INFO_NOTES") - @GetMapping(value="/kerberos-startup-state") - @ResponseStatus(HttpStatus.OK) - public Result getKerberosStartupState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ - logger.info("login user {},get kerberos startup state : {}", loginUser.getUserName()); - try{ - // if upload resource is HDFS and kerberos startup is true , else false - return success(Status.SUCCESS.getMsg(), CommonUtils.getKerberosStartupState()); - }catch (Exception e){ - logger.error(KERBEROS_STARTUP_STATE.getMsg(),e); - return error(Status.KERBEROS_STARTUP_STATE.getCode(), Status.KERBEROS_STARTUP_STATE.getMsg()); - } - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/ExecutorController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/ExecutorController.java deleted file mode 100644 index 098c5e8e43..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/ExecutorController.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.enums.ExecuteType; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.ExecutorService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.*; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; - - -/** - * execute process controller - */ -@Api(tags = "PROCESS_INSTANCE_EXECUTOR_TAG", position = 1) -@RestController -@RequestMapping("projects/{projectName}/executors") -public class ExecutorController extends BaseController { - - private static final Logger logger = LoggerFactory.getLogger(ExecutorController.class); - - @Autowired - private ExecutorService execService; - - /** - * execute process instance - */ - @ApiOperation(value = "startProcessInstance", notes= "RUN_PROCESS_INSTANCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), - 
@ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", required = true, dataType = "String"), - @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", required = true, dataType ="FailureStrategy"), - @ApiImplicitParam(name = "startNodeList", value = "START_NODE_LIST", dataType ="String"), - @ApiImplicitParam(name = "taskDependType", value = "TASK_DEPEND_TYPE", dataType ="TaskDependType"), - @ApiImplicitParam(name = "execType", value = "COMMAND_TYPE", dataType ="CommandType"), - @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE",required = true, dataType ="WarningType"), - @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID",required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "receivers", value = "RECEIVERS",dataType ="String" ), - @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC",dataType ="String" ), - @ApiImplicitParam(name = "runMode", value = "RUN_MODE",dataType ="RunMode" ), - @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority" ), - @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int",example = "100"), - @ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int",example = "100"), - }) - @PostMapping(value = "start-process-instance") - @ResponseStatus(HttpStatus.OK) - public Result startProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "processDefinitionId") int processDefinitionId, - @RequestParam(value = "scheduleTime", required = false) String scheduleTime, - @RequestParam(value = "failureStrategy", required = true) FailureStrategy failureStrategy, - @RequestParam(value = "startNodeList", required = false) String startNodeList, - @RequestParam(value = "taskDependType", 
required = false) TaskDependType taskDependType, - @RequestParam(value = "execType", required = false) CommandType execType, - @RequestParam(value = "warningType", required = true) WarningType warningType, - @RequestParam(value = "warningGroupId", required = false) int warningGroupId, - @RequestParam(value = "receivers", required = false) String receivers, - @RequestParam(value = "receiversCc", required = false) String receiversCc, - @RequestParam(value = "runMode", required = false) RunMode runMode, - @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority, - @RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId, - @RequestParam(value = "timeout", required = false) Integer timeout) { - try { - logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, " - + "failure policy: {}, node name: {}, node dep: {}, notify type: {}, " - + "notify group id: {},receivers:{},receiversCc:{}, run mode: {},process instance priority:{}, workerGroupId: {}, timeout: {}", - loginUser.getUserName(), projectName, processDefinitionId, scheduleTime, - failureStrategy, startNodeList, taskDependType, warningType, warningGroupId,receivers,receiversCc,runMode,processInstancePriority, - workerGroupId, timeout); - - if (timeout == null) { - timeout = cn.escheduler.common.Constants.MAX_TASK_TIMEOUT; - } - - Map result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy, - startNodeList, taskDependType, warningType, - warningGroupId,receivers,receiversCc, runMode,processInstancePriority, workerGroupId, timeout); - return returnDataList(result); - } catch (Exception e) { - logger.error(START_PROCESS_INSTANCE_ERROR.getMsg(),e); - return error(Status.START_PROCESS_INSTANCE_ERROR.getCode(), Status.START_PROCESS_INSTANCE_ERROR.getMsg()); - } - } - - - /** - * do action to process 
instance:pause, stop, repeat, recover from pause, recover from stop - * - * @param loginUser - * @param projectName - * @param processInstanceId - * @return - */ - @ApiOperation(value = "execute", notes= "EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "executeType", value = "EXECUTE_TYPE", required = true, dataType = "ExecuteType") - }) - @PostMapping(value = "/execute") - @ResponseStatus(HttpStatus.OK) - public Result execute(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processInstanceId") Integer processInstanceId, - @RequestParam("executeType") ExecuteType executeType - ) { - try { - logger.info("execute command, login user: {}, project:{}, process instance id:{}, execute type:{}", - loginUser.getUserName(), projectName, processInstanceId, executeType.toString()); - Map result = execService.execute(loginUser, projectName, processInstanceId, executeType); - return returnDataList(result); - } catch (Exception e) { - logger.error(EXECUTE_PROCESS_INSTANCE_ERROR.getMsg(),e); - return error(EXECUTE_PROCESS_INSTANCE_ERROR.getCode(), EXECUTE_PROCESS_INSTANCE_ERROR.getMsg()); - } - } - - /** - * check process definition and all of the son process definitions is on line. 
- * - * @param loginUser - * @param processDefinitionId - * @return - */ - @ApiOperation(value = "startCheckProcessDefinition", notes= "START_CHECK_PROCESS_DEFINITION_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") - }) - @PostMapping(value = "/start-check") - @ResponseStatus(HttpStatus.OK) - public Result startCheckProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "processDefinitionId") int processDefinitionId) { - logger.info("login user {}, check process definition", loginUser.getUserName(), processDefinitionId); - try { - Map result = execService.startCheckByProcessDefinedId(processDefinitionId); - return returnDataList(result); - - } catch (Exception e) { - logger.error(CHECK_PROCESS_DEFINITION_ERROR.getMsg(),e); - return error(CHECK_PROCESS_DEFINITION_ERROR.getCode(), CHECK_PROCESS_DEFINITION_ERROR.getMsg()); - } - } - - /** - * query recipients and copyers by process definition ID - * - * @param loginUser - * @param processDefinitionId - * @return - */ - @ApiIgnore - @ApiOperation(value = "getReceiverCc", notes= "GET_RECEIVER_CC_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100") - - }) - @GetMapping(value = "/get-receiver-cc") - @ResponseStatus(HttpStatus.OK) - public Result getReceiverCc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "processDefinitionId",required = false) Integer processDefinitionId, - @RequestParam(value = "processInstanceId",required = false) Integer processInstanceId) { - logger.info("login user {}, get process definition receiver and cc", 
loginUser.getUserName()); - try { - Map result = execService.getReceiverCc(processDefinitionId,processInstanceId); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getMsg(),e); - return error(QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getCode(), QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getMsg()); - } - } - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/LoggerController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/LoggerController.java deleted file mode 100644 index 0393462999..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/LoggerController.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.service.LoggerService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpHeaders; -import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import static cn.escheduler.api.enums.Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR; -import static cn.escheduler.api.enums.Status.QUERY_TASK_INSTANCE_LOG_ERROR; - - -/** - * log controller - */ -@Api(tags = "LOGGER_TAG", position = 13) -@RestController -@RequestMapping("/log") -public class LoggerController extends BaseController { - - private static final Logger logger = LoggerFactory.getLogger(LoggerController.class); - - - @Autowired - private LoggerService loggerService; - - /** - * query task log - */ - @ApiOperation(value = "queryLog", notes= "QUERY_TASK_INSTANCE_LOG_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "taskInstId", value = "TASK_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", dataType ="Int", example = "100"), - @ApiImplicitParam(name = "limit", value = "LIMIT", dataType ="Int", example = "100") - }) - @GetMapping(value = "/detail") - @ResponseStatus(HttpStatus.OK) - public Result queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "taskInstId") int taskInstanceId, - @RequestParam(value = "skipLineNum") int skipNum, - @RequestParam(value = "limit") int limit) { - 
try { - - logger.info( - "login user {}, view {} task instance log ,skipLineNum {} , limit {}", loginUser.getUserName(), taskInstanceId, skipNum, limit); - return loggerService.queryLog(taskInstanceId, skipNum, limit); - } catch (Exception e) { - logger.error(QUERY_TASK_INSTANCE_LOG_ERROR.getMsg(), e); - return error(QUERY_TASK_INSTANCE_LOG_ERROR.getCode(), QUERY_TASK_INSTANCE_LOG_ERROR.getMsg()); - } - } - - - /** - * download log file - * - * @param loginUser - * @param taskInstanceId - */ - @ApiOperation(value = "downloadTaskLog", notes= "DOWNLOAD_TASK_INSTANCE_LOG_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "taskInstId", value = "TASK_ID",dataType = "Int", example = "100") - }) - @GetMapping(value = "/download-log") - @ResponseBody - public ResponseEntity downloadTaskLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "taskInstId") int taskInstanceId) { - try { - byte[] logBytes = loggerService.getLogBytes(taskInstanceId); - return ResponseEntity - .ok() - .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + System.currentTimeMillis() + ".queryLog" + "\"") - .body(logBytes); - } catch (Exception e) { - logger.error(DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR.getMsg(), e); - return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR.getMsg()); - } - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/LoginController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/LoginController.java deleted file mode 100644 index 250862d491..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/LoginController.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.SessionService; -import cn.escheduler.api.service.UsersService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.*; -import org.apache.commons.httpclient.HttpStatus; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import static cn.escheduler.api.enums.Status.*; - -/** - * user login controller - * - * swagger bootstrap ui docs refer : https://doc.xiaominfo.com/guide/enh-func.html - */ -@Api(tags = "LOGIN_TAG", position = 1) -@RestController -@RequestMapping("") -public class LoginController extends BaseController { - - private static final Logger logger = LoggerFactory.getLogger(LoginController.class); - - - @Autowired - private SessionService sessionService; - - @Autowired - private UsersService userService; - - - /** - * login - * - * @param userName - * @param userPassword - * @param request - * @param 
response - * @return - */ - @ApiOperation(value = "login", notes= "LOGIN_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"), - @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", required = true, dataType ="String") - }) - @PostMapping(value = "/login") - public Result login(@RequestParam(value = "userName") String userName, - @RequestParam(value = "userPassword") String userPassword, - HttpServletRequest request, - HttpServletResponse response) { - - try { - logger.info("login user name: {} ", userName); - - //user name check - if (StringUtils.isEmpty(userName)) { - return error(Status.USER_NAME_NULL.getCode(), - Status.USER_NAME_NULL.getMsg()); - } - - // user ip check - String ip = getClientIpAddress(request); - if (StringUtils.isEmpty(ip)) { - return error(IP_IS_EMPTY.getCode(), IP_IS_EMPTY.getMsg()); - } - - // verify username and password - User user = userService.queryUser(userName, userPassword); - - if (user == null) { - return error(Status.USER_NAME_PASSWD_ERROR.getCode(),Status.USER_NAME_PASSWD_ERROR.getMsg() - ); - } - - // create session - String sessionId = sessionService.createSession(user, ip); - - if (sessionId == null) { - return error(Status.LOGIN_SESSION_FAILED.getCode(), - Status.LOGIN_SESSION_FAILED.getMsg() - ); - } - - response.setStatus(HttpStatus.SC_OK); - response.addCookie(new Cookie(Constants.SESSION_ID, sessionId)); - - logger.info("sessionId : {}" , sessionId); - return success(LOGIN_SUCCESS.getMsg(), sessionId); - } catch (Exception e) { - logger.error(USER_LOGIN_FAILURE.getMsg(),e); - return error(USER_LOGIN_FAILURE.getCode(), USER_LOGIN_FAILURE.getMsg()); - } - } - - /** - * sign out - * - * @param loginUser - * @return - */ - @ApiOperation(value = "signOut", notes = "SIGNOUT_NOTES") - @PostMapping(value = "/signOut") - public Result signOut(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - HttpServletRequest 
request) { - - try { - logger.info("login user:{} sign out", loginUser.getUserName()); - String ip = getClientIpAddress(request); - sessionService.signOut(ip, loginUser); - //clear session - request.removeAttribute(Constants.SESSION_USER); - return success(); - } catch (Exception e) { - logger.error(SIGN_OUT_ERROR.getMsg(),e); - return error(SIGN_OUT_ERROR.getCode(), SIGN_OUT_ERROR.getMsg()); - } - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/MonitorController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/MonitorController.java deleted file mode 100644 index a9ae6d4c2c..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/MonitorController.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.service.MonitorService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; - - -/** - * monitor controller - */ -@Api(tags = "MONITOR_TAG", position = 1) -@RestController -@RequestMapping("/monitor") -public class MonitorController extends BaseController{ - - private static final Logger logger = LoggerFactory.getLogger(MonitorController.class); - - @Autowired - private MonitorService monitorService; - - /** - * master list - * @param loginUser - * @return - */ - @ApiOperation(value = "listMaster", notes= "MASTER_LIST_NOTES") - @GetMapping(value = "/master/list") - @ResponseStatus(HttpStatus.OK) - public Result listMaster(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - logger.info("login user: {}, query all master", loginUser.getUserName()); - try{ - logger.info("list master, user:{}", loginUser.getUserName()); - Map result = monitorService.queryMaster(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(LIST_MASTERS_ERROR.getMsg(),e); - return error(LIST_MASTERS_ERROR.getCode(), - LIST_MASTERS_ERROR.getMsg()); - } - } - - /** - * worker list - * @param loginUser - * @return - */ - @ApiOperation(value = "listWorker", notes= "WORKER_LIST_NOTES") - @GetMapping(value = "/worker/list") - @ResponseStatus(HttpStatus.OK) - public Result listWorker(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - logger.info("login 
user: {}, query all workers", loginUser.getUserName()); - try{ - Map result = monitorService.queryWorker(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(LIST_WORKERS_ERROR.getMsg(),e); - return error(LIST_WORKERS_ERROR.getCode(), - LIST_WORKERS_ERROR.getMsg()); - } - } - - /** - * query database state - * @param loginUser - * @return - */ - @ApiOperation(value = "queryDatabaseState", notes= "QUERY_DATABASE_STATE_NOTES") - @GetMapping(value = "/database") - @ResponseStatus(HttpStatus.OK) - public Result queryDatabaseState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - logger.info("login user: {}, query database state", loginUser.getUserName()); - try{ - - Map result = monitorService.queryDatabaseState(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_DATABASE_STATE_ERROR.getMsg(),e); - return error(QUERY_DATABASE_STATE_ERROR.getCode(), - QUERY_DATABASE_STATE_ERROR.getMsg()); - } - } - - /** - * query zookeeper state - * @param loginUser - * @return - */ - @ApiOperation(value = "queryZookeeperState", notes= "QUERY_ZOOKEEPER_STATE_NOTES") - @GetMapping(value = "/zookeeper/list") - @ResponseStatus(HttpStatus.OK) - public Result queryZookeeperState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - logger.info("login user: {}, query zookeeper state", loginUser.getUserName()); - try{ - Map result = monitorService.queryZookeeperState(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_ZOOKEEPER_STATE_ERROR.getMsg(),e); - return error(QUERY_ZOOKEEPER_STATE_ERROR.getCode(), - QUERY_ZOOKEEPER_STATE_ERROR.getMsg()); - } - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessDefinitionController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessDefinitionController.java deleted file mode 100644 index 55a29c31ed..0000000000 --- 
a/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessDefinitionController.java +++ /dev/null @@ -1,494 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.ProcessDefinitionService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import javax.servlet.http.HttpServletResponse; -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; -import static cn.escheduler.api.enums.Status.EXPORT_PROCESS_DEFINE_BY_ID_ERROR; - - -/** - * process definition controller - */ -@Api(tags = "PROCESS_DEFINITION_TAG", position = 2) -@RestController -@RequestMapping("projects/{projectName}/process") -public class ProcessDefinitionController extends BaseController{ 
- - private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionController.class); - - @Autowired - private ProcessDefinitionService processDefinitionService; - - /** - * create process definition - * - * @param loginUser - * @param projectName - * @param name - * @param json process definition json - * @param desc - * @return - */ - @ApiOperation(value = "save", notes= "CREATE_PROCESS_DEFINITION_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), - @ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type ="String"), - @ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type ="String"), - @ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type ="String"), - @ApiImplicitParam(name = "desc", value = "PROCESS_DEFINITION_DESC", required = false, type ="String"), - }) - @PostMapping(value = "/save") - @ResponseStatus(HttpStatus.CREATED) - public Result createProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "name", required = true) String name, - @RequestParam(value = "processDefinitionJson", required = true) String json, - @RequestParam(value = "locations", required = true) String locations, - @RequestParam(value = "connects", required = true) String connects, - @RequestParam(value = "desc", required = false) String desc) { - - try { - logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " + - "process_definition_json: {}, desc: {} locations:{}, connects:{}", - loginUser.getUserName(), projectName, name, json, desc, locations, connects); - Map result = processDefinitionService.createProcessDefinition(loginUser, 
projectName, name, json, - desc, locations, connects); - return returnDataList(result); - } catch (Exception e) { - logger.error(CREATE_PROCESS_DEFINITION.getMsg(), e); - return error(CREATE_PROCESS_DEFINITION.getCode(), CREATE_PROCESS_DEFINITION.getMsg()); - } - } - - /** - * verify process definition name unique - * - * @param loginUser - * @param projectName - * @param name - * @return - */ - @ApiOperation(value = "verify-name", notes = "VERIFY_PROCCESS_DEFINITION_NAME_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String") - }) - @GetMapping(value = "/verify-name") - @ResponseStatus(HttpStatus.OK) - public Result verifyProccessDefinitionName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam(value = "name", required = true) String name){ - try { - logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}", - loginUser.getUserName(), projectName, name); - Map result = processDefinitionService.verifyProccessDefinitionName(loginUser, projectName, name); - return returnDataList(result); - }catch (Exception e){ - logger.error(VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getMsg(),e); - return error(VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getCode(), Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getMsg()); - } - } - - /** - * update process definition - * - * @param loginUser - * @param projectName - * @param name - * @param id - * @param processDefinitionJson - * @param desc - * @return - */ - @ApiOperation(value = "updateProccessDefinition", notes= "UPDATE_PROCCESS_DEFINITION_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), - @ApiImplicitParam(name = "id", value = "PROCESS_DEFINITION_ID", required = 
true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type ="String"), - @ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type ="String"), - @ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type ="String"), - @ApiImplicitParam(name = "desc", value = "PROCESS_DEFINITION_DESC", required = false, type ="String"), - }) - @PostMapping(value = "/update") - @ResponseStatus(HttpStatus.OK) - public Result updateProccessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam(value = "name", required = true) String name, - @RequestParam(value = "id", required = true) int id, - @RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson, - @RequestParam(value = "locations", required = false) String locations, - @RequestParam(value = "connects", required = false) String connects, - @RequestParam(value = "desc", required = false) String desc) { - - try { - logger.info("login user {}, update process define, project name: {}, process define name: {}, " + - "process_definition_json: {}, desc: {}, locations:{}, connects:{}", - loginUser.getUserName(), projectName, name, processDefinitionJson,desc, locations, connects); - Map result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name, - processDefinitionJson, desc, locations, connects); - return returnDataList(result); - }catch (Exception e){ - logger.error(UPDATE_PROCESS_DEFINITION_ERROR.getMsg(),e); - return error(UPDATE_PROCESS_DEFINITION_ERROR.getCode(), Status.UPDATE_PROCESS_DEFINITION_ERROR.getMsg()); - } - } - - /** - * release process definition - * - * @param loginUser - * @param projectName - * @param processId - * @param 
releaseState - * @return - */ - @ApiOperation(value = "releaseProccessDefinition", notes= "RELEASE_PROCCESS_DEFINITION_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), - @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "releaseState", value = "PROCESS_DEFINITION_CONNECTS", required = true, dataType = "Int", example = "100"), - }) - @PostMapping(value = "/release") - @ResponseStatus(HttpStatus.OK) - public Result releaseProccessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam(value = "processId", required = true) int processId, - @RequestParam(value = "releaseState", required = true) int releaseState) { - - try { - logger.info("login user {}, release process definition, project name: {}, release state: {}", - loginUser.getUserName(), projectName, releaseState); - Map result = processDefinitionService.releaseProcessDefinition(loginUser, projectName, processId, releaseState); - return returnDataList(result); - }catch (Exception e){ - logger.error(RELEASE_PROCESS_DEFINITION_ERROR.getMsg(),e); - return error(RELEASE_PROCESS_DEFINITION_ERROR.getCode(), Status.RELEASE_PROCESS_DEFINITION_ERROR.getMsg()); - } - } - - - /** - * query datail of process definition - * - * @param loginUser - * @param projectName - * @param processId - * @return - */ - @ApiOperation(value = "queryProccessDefinitionById", notes= "QUERY_PROCCESS_DEFINITION_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") - }) - @GetMapping(value="/select-by-id") - @ResponseStatus(HttpStatus.OK) - public Result queryProccessDefinitionById(@ApiIgnore 
@RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam("processId") Integer processId - ){ - try{ - logger.info("query datail of process definition, login user:{}, project name:{}, process definition id:{}", - loginUser.getUserName(), projectName, processId); - Map result = processDefinitionService.queryProccessDefinitionById(loginUser, projectName, processId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getMsg(),e); - return error(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getCode(), Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getMsg()); - } - } - - - /** - * query proccess definition list - * - * @param loginUser - * @param projectName - * @return - */ - @ApiOperation(value = "queryProccessDefinitionList", notes= "QUERY_PROCCESS_DEFINITION_LIST_NOTES") - @GetMapping(value="/list") - @ResponseStatus(HttpStatus.OK) - public Result queryProccessDefinitionList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName - ){ - try{ - logger.info("query proccess definition list, login user:{}, project name:{}", - loginUser.getUserName(), projectName); - Map result = processDefinitionService.queryProccessDefinitionList(loginUser, projectName); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_PROCCESS_DEFINITION_LIST.getMsg(),e); - return error(QUERY_PROCCESS_DEFINITION_LIST.getCode(), QUERY_PROCCESS_DEFINITION_LIST.getMsg()); - } - } - - /** - * query proccess definition list paging - * @param loginUser - * @param projectName - * @param pageNo - * @param pageSize - * @return - */ - @ApiOperation(value = "queryProcessDefinitionListPaging", notes= "QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES") - @ApiImplicitParams({ - 
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", required = false, type = "String"), - @ApiImplicitParam(name = "userId", value = "USER_ID", required = false, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "100") - }) - @GetMapping(value="/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryProcessDefinitionListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam(value = "userId", required = false, defaultValue = "0") Integer userId, - @RequestParam("pageSize") Integer pageSize){ - try{ - logger.info("query proccess definition list paging, login user:{}, project name:{}", loginUser.getUserName(), projectName); - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - searchVal = ParameterUtils.handleEscapes(searchVal); - result = processDefinitionService.queryProcessDefinitionListPaging(loginUser, projectName, searchVal, pageNo, pageSize, userId); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getMsg(),e); - return error(QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getCode(), QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getMsg()); - } - } - - - /** - * encapsulation treeview structure - * - * @param loginUser - * @param projectName - * @param id - * @return - */ - @ApiOperation(value = "viewTree", notes= "VIEW_TREE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processId", value = 
"PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType = "Int", example = "100") - }) - @GetMapping(value="/view-tree") - @ResponseStatus(HttpStatus.OK) - public Result viewTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam("processId") Integer id, - @RequestParam("limit") Integer limit){ - try{ - Map result = processDefinitionService.viewTree(id, limit); - return returnDataList(result); - }catch (Exception e){ - logger.error(ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getMsg(),e); - return error(ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getCode(),ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getMsg()); - } - } - - - /** - * - * get tasks list by process definition id - * - * - * @param loginUser - * @param projectName - * @param processDefinitionId - * @return - */ - @ApiOperation(value = "getNodeListByDefinitionId", notes= "GET_NODE_LIST_BY_DEFINITION_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") - }) - @GetMapping(value="gen-task-list") - @ResponseStatus(HttpStatus.OK) - public Result getNodeListByDefinitionId( - @ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam("processDefinitionId") Integer processDefinitionId){ - try { - logger.info("query task node name list by definitionId, login user:{}, project name:{}, id : {}", - loginUser.getUserName(), projectName, processDefinitionId); - Map result = processDefinitionService.getTaskNodeListByDefinitionId(processDefinitionId); - return returnDataList(result); - }catch (Exception e){ - 
logger.error(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg(), e); - return error(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getCode(), GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg()); - } - } - - /** - * - * get tasks list by process definition id - * - * - * @param loginUser - * @param projectName - * @param processDefinitionIdList - * @return - */ - @ApiOperation(value = "getNodeListByDefinitionIdList", notes= "GET_NODE_LIST_BY_DEFINITION_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionIdList", value = "PROCESS_DEFINITION_ID_LIST", required = true, type = "String") - }) - @GetMapping(value="get-task-list") - @ResponseStatus(HttpStatus.OK) - public Result getNodeListByDefinitionIdList( - @ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam("processDefinitionIdList") String processDefinitionIdList){ - - try { - logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}", - loginUser.getUserName(), projectName, processDefinitionIdList); - Map result = processDefinitionService.getTaskNodeListByDefinitionIdList(processDefinitionIdList); - return returnDataList(result); - }catch (Exception e){ - logger.error(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg(), e); - return error(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getCode(), GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg()); - } - } - - /** - * delete process definition by id - * - * @param loginUser - * @param projectName - * @param processDefinitionId - * @return - */ - @ApiOperation(value = "deleteProcessDefinitionById", notes= "DELETE_PROCESS_DEFINITION_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", dataType = "Int", example = "100") - }) - @GetMapping(value="/delete") - 
@ResponseStatus(HttpStatus.OK) - public Result deleteProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processDefinitionId") Integer processDefinitionId - ){ - try{ - logger.info("delete process definition by id, login user:{}, project name:{}, process definition id:{}", - loginUser.getUserName(), projectName, processDefinitionId); - Map result = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId); - return returnDataList(result); - }catch (Exception e){ - logger.error(DELETE_PROCESS_DEFINE_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getCode(), Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getMsg()); - } - } - - /** - * batch delete process definition by ids - * - * @param loginUser - * @param projectName - * @param processDefinitionIds - * @return - */ - @ApiOperation(value = "batchDeleteProcessDefinitionByIds", notes= "BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", type = "String") - }) - @GetMapping(value="/batch-delete") - @ResponseStatus(HttpStatus.OK) - public Result batchDeleteProcessDefinitionByIds(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processDefinitionIds") String processDefinitionIds - ){ - try{ - logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}", - loginUser.getUserName(), projectName, processDefinitionIds); - Map result = processDefinitionService.batchDeleteProcessDefinitionByIds(loginUser, projectName, processDefinitionIds); - return returnDataList(result); - }catch (Exception e){ 
- logger.error(BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(),e); - return error(Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getCode(), Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getMsg()); - } - } - - /** - * export process definition by id - * - * @param loginUser - * @param projectName - * @param processDefinitionId - * @return - */ - @ApiOperation(value = "exportProcessDefinitionById", notes= "EXPORT_PROCCESS_DEFINITION_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") - }) - @GetMapping(value="/export") - @ResponseBody - public void exportProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @PathVariable String projectName, - @RequestParam("processDefinitionId") Integer processDefinitionId, - HttpServletResponse response){ - try{ - logger.info("export process definition by id, login user:{}, project name:{}, process definition id:{}", - loginUser.getUserName(), projectName, processDefinitionId); - processDefinitionService.exportProcessDefinitionById(loginUser, projectName, processDefinitionId,response); - }catch (Exception e){ - logger.error(EXPORT_PROCESS_DEFINE_BY_ID_ERROR.getMsg(),e); - } - } - - - - /** - * query proccess definition all by project id - * - * @param loginUser - * @return - */ - @ApiOperation(value = "queryProccessDefinitionAllByProjectId", notes= "QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES") - @GetMapping(value="/queryProccessDefinitionAllByProjectId") - @ResponseStatus(HttpStatus.OK) - public Result queryProccessDefinitionAllByProjectId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("projectId") Integer projectId){ - try{ - logger.info("query proccess definition list, login user:{}, project id:{}", - loginUser.getUserName(),projectId); - Map result = 
processDefinitionService.queryProccessDefinitionAllByProjectId(projectId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_PROCCESS_DEFINITION_LIST.getMsg(),e); - return error(QUERY_PROCCESS_DEFINITION_LIST.getCode(), QUERY_PROCCESS_DEFINITION_LIST.getMsg()); - } - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessInstanceController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessInstanceController.java deleted file mode 100644 index 83170d655c..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessInstanceController.java +++ /dev/null @@ -1,367 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.ProcessInstanceService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.enums.Flag; -import cn.escheduler.common.queue.ITaskQueue; -import cn.escheduler.common.queue.TaskQueueFactory; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; - -/** - * process instance controller - */ -@Api(tags = "PROCESS_INSTANCE_TAG", position = 10) -@RestController -@RequestMapping("projects/{projectName}/instance") -public class ProcessInstanceController extends BaseController{ - - private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceController.class); - - - @Autowired - ProcessInstanceService processInstanceService; - - /** - * query process instance list paging - * - * @param loginUser - * @param projectName - * @param pageNo - * @param pageSize - * @return - */ - @ApiOperation(value = "queryProcessInstanceList", notes= "QUERY_PROCESS_INSTANCE_LIST_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"), - @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type ="ExecutionStatus"), - @ApiImplicitParam(name = "host", value = "HOST", type ="String"), - @ApiImplicitParam(name = "startDate", value = "START_DATE", type ="String"), - 
@ApiImplicitParam(name = "endDate", value = "END_DATE", type ="String"), - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100") - }) - @GetMapping(value="list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryProcessInstanceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "processDefinitionId", required = false, defaultValue = "0") Integer processDefinitionId, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam(value = "stateType", required = false) ExecutionStatus stateType, - @RequestParam(value = "host", required = false) String host, - @RequestParam(value = "startDate", required = false) String startTime, - @RequestParam(value = "endDate", required = false) String endTime, - @RequestParam("pageNo") Integer pageNo, - @RequestParam("pageSize") Integer pageSize){ - try{ - logger.info("query all process instance list, login user:{},project name:{}, define id:{}," + - "search value:{},state type:{},host:{},start time:{}, end time:{},page number:{}, page size:{}", - loginUser.getUserName(), projectName, processDefinitionId, searchVal, stateType,host, - startTime, endTime, pageNo, pageSize); - searchVal = ParameterUtils.handleEscapes(searchVal); - Map result = processInstanceService.queryProcessInstanceList( - loginUser, projectName, processDefinitionId, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg()); - } - } - - /** - * query task list by process 
instance id - * - * @param loginUser - * @param projectName - * @param processInstanceId - * @return - */ - @ApiOperation(value = "queryTaskListByProcessId", notes= "QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") - }) - @GetMapping(value="/task-list-by-process-id") - @ResponseStatus(HttpStatus.OK) - public Result queryTaskListByProcessId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processInstanceId") Integer processInstanceId - ) { - try{ - logger.info("query task instance list by process instance id, login user:{}, project name:{}, process instance id:{}", - loginUser.getUserName(), projectName, processInstanceId); - Map result = processInstanceService.queryTaskListByProcessId(loginUser, projectName, processInstanceId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getMsg(),e); - return error(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getCode(), QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getMsg()); - } - } - - /** - * update process instance - * - * @param loginUser - * @param projectName - * @param processInstanceJson - * @param processInstanceId - * @param scheduleTime - * @param syncDefine - * @param flag - * @return - */ - @ApiOperation(value = "updateProcessInstance", notes= "UPDATE_PROCESS_INSTANCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processInstanceJson", value = "PROCESS_INSTANCE_JSON", type = "String"), - @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", type = "String"), - @ApiImplicitParam(name = "syncDefine", value = "SYNC_DEFINE", type = 
"Boolean"), - @ApiImplicitParam(name = "locations", value = "PROCESS_INSTANCE_LOCATIONS", type = "String"), - @ApiImplicitParam(name = "connects", value = "PROCESS_INSTANCE_CONNECTS", type = "String"), - @ApiImplicitParam(name = "flag", value = "RECOVERY_PROCESS_INSTANCE_FLAG", type = "Flag"), - }) - @PostMapping(value="/update") - @ResponseStatus(HttpStatus.OK) - public Result updateProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam( value = "processInstanceJson", required = false) String processInstanceJson, - @RequestParam( value = "processInstanceId") Integer processInstanceId, - @RequestParam( value = "scheduleTime", required = false) String scheduleTime, - @RequestParam( value = "syncDefine", required = true) Boolean syncDefine, - @RequestParam(value = "locations", required = false) String locations, - @RequestParam(value = "connects", required = false) String connects, - @RequestParam( value = "flag", required = false) Flag flag - ){ - try{ - logger.info("updateProcessInstance process instance, login user:{}, project name:{}, process instance json:{}," + - "process instance id:{}, schedule time:{}, sync define:{}, flag:{}, locations:{}, connects:{}", - loginUser.getUserName(), projectName, processInstanceJson, processInstanceId, scheduleTime, - syncDefine, flag, locations, connects); - Map result = processInstanceService.updateProcessInstance(loginUser, projectName, - processInstanceId, processInstanceJson, scheduleTime, syncDefine, flag, locations, connects); - return returnDataList(result); - }catch (Exception e){ - logger.error(UPDATE_PROCESS_INSTANCE_ERROR.getMsg(),e); - return error(Status.UPDATE_PROCESS_INSTANCE_ERROR.getCode(), Status.UPDATE_PROCESS_INSTANCE_ERROR.getMsg()); - } - } - - /** - * query process instance by id - * - * @param loginUser - * @param projectName - * @param 
processInstanceId - * @return - */ - @ApiOperation(value = "queryProcessInstanceById", notes= "QUERY_PROCESS_INSTANCE_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") - }) - @GetMapping(value="/select-by-id") - @ResponseStatus(HttpStatus.OK) - public Result queryProcessInstanceById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processInstanceId") Integer processInstanceId - ){ - try{ - logger.info("query process instance detail by id, login user:{},project name:{}, process instance id:{}", - loginUser.getUserName(), projectName, processInstanceId); - Map result = processInstanceService.queryProcessInstanceById(loginUser, projectName, processInstanceId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e); - return error(Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getMsg()); - } - } - - /** - * delete process instance by id, at the same time, - * delete task instance and their mapping relation data - * - * @param loginUser - * @param projectName - * @param processInstanceId - * @return - */ - @ApiOperation(value = "deleteProcessInstanceById", notes= "DELETE_PROCESS_INSTANCE_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") - }) - @GetMapping(value="/delete") - @ResponseStatus(HttpStatus.OK) - public Result deleteProcessInstanceById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processInstanceId") Integer processInstanceId - ){ - try{ - 
logger.info("delete process instance by id, login user:{}, project name:{}, process instance id:{}", - loginUser.getUserName(), projectName, processInstanceId); - // task queue - ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance(); - Map result = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId,tasksQueue); - return returnDataList(result); - }catch (Exception e){ - logger.error(DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg()); - } - } - - /** - * query sub process instance detail info by task id - * - * @param loginUser - * @param projectName - * @param taskId - * @return - */ - @ApiOperation(value = "querySubProcessInstanceByTaskId", notes= "QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "taskId", value = "TASK_ID", dataType = "Int", example = "100") - }) - @GetMapping(value="/select-sub-process") - @ResponseStatus(HttpStatus.OK) - public Result querySubProcessInstanceByTaskId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("taskId") Integer taskId){ - try{ - Map result = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, taskId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getMsg(),e); - return error(Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getCode(), Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getMsg()); - } - } - - /** - * query parent process instance detail info by sub process instance id - * - * @param loginUser - * @param projectName - * @param subId - * @return - */ - @ApiOperation(value = "queryParentInstanceBySubId", notes= 
"QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "subId", value = "SUB_PROCESS_INSTANCE_ID", dataType = "Int", example = "100") - }) - @GetMapping(value="/select-parent-process") - @ResponseStatus(HttpStatus.OK) - public Result queryParentInstanceBySubId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("subId") Integer subId){ - try{ - Map result = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, subId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getMsg(),e); - return error(Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getCode(), Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getMsg()); - } - } - - /** - * query process instance global variables and local variables - * - * @param loginUser - * @param processInstanceId - * @return - */ - @ApiOperation(value = "viewVariables", notes= "QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") - }) - @GetMapping(value="/view-variables") - @ResponseStatus(HttpStatus.OK) - public Result viewVariables(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser - , @RequestParam("processInstanceId") Integer processInstanceId){ - try{ - Map result = processInstanceService.viewVariables(processInstanceId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getMsg(),e); - return error(Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getCode(), 
Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getMsg()); - } - } - - /** - * encapsulation gantt structure - * - * @param loginUser - * @param projectName - * @param processInstanceId - * @return - */ - @ApiOperation(value = "vieGanttTree", notes= "VIEW_GANTT_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") - }) - @GetMapping(value="/view-gantt") - @ResponseStatus(HttpStatus.OK) - public Result viewTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processInstanceId") Integer processInstanceId){ - try{ - Map result = processInstanceService.viewGantt(processInstanceId); - return returnDataList(result); - }catch (Exception e){ - logger.error(ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg(),e); - return error(Status.ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getCode(),ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg()); - } - } - - /** - * batch delete process instance by ids, at the same time, - * delete task instance and their mapping relation data - * - * @param loginUser - * @param projectName - * @param processInstanceIds - * @return - */ - @GetMapping(value="/batch-delete") - @ResponseStatus(HttpStatus.OK) - public Result batchDeleteProcessInstanceByIds(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @PathVariable String projectName, - @RequestParam("processInstanceIds") String processInstanceIds - ){ - try{ - logger.info("delete process instance by ids, login user:{}, project name:{}, process instance ids :{}", - loginUser.getUserName(), projectName, processInstanceIds); - Map result = processInstanceService.batchDeleteProcessInstanceByIds(loginUser, projectName, processInstanceIds); - return returnDataList(result); - }catch (Exception e){ - 
logger.error(BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getMsg(),e); - return error(Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getCode(), Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getMsg()); - } - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/ProjectController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/ProjectController.java deleted file mode 100644 index 383dc687d8..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/ProjectController.java +++ /dev/null @@ -1,304 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.ProcessDefinitionService; -import cn.escheduler.api.service.ProjectService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import org.springframework.web.multipart.MultipartFile; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; - -/** - * project controller - */ -@Api(tags = "PROJECT_TAG", position = 1) -@RestController -@RequestMapping("projects") -public class ProjectController extends BaseController { - - private static final Logger logger = LoggerFactory.getLogger(ProjectController.class); - - @Autowired - private ProjectService projectService; - - @Autowired - private ProcessDefinitionService processDefinitionService; - - /** - * create project - * - * @param loginUser - * @param projectName - * @param desc - * @return returns an error if it exists - */ - @ApiOperation(value = "createProject", notes= "CREATE_PROJECT_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "projectName", value = "PROJECT_NAME", dataType ="String"), - @ApiImplicitParam(name = "desc", value = "PROJECT_DESC", dataType = "String") - }) - @PostMapping(value = "/create") - @ResponseStatus(HttpStatus.CREATED) - public Result createProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("projectName") String projectName, - 
@RequestParam(value = "desc", required = false) String desc) { - - try { - logger.info("login user {}, create project name: {}, desc: {}", loginUser.getUserName(), projectName, desc); - Map result = projectService.createProject(loginUser, projectName, desc); - return returnDataList(result); - } catch (Exception e) { - logger.error(CREATE_PROJECT_ERROR.getMsg(), e); - return error(CREATE_PROJECT_ERROR.getCode(), CREATE_PROJECT_ERROR.getMsg()); - } - } - - /** - * updateProcessInstance project - * - * @param loginUser - * @param projectId - * @param projectName - * @param desc - * @return - */ - @ApiOperation(value = "updateProject", notes= "UPDATE_PROJECT_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100"), - @ApiImplicitParam(name = "projectName",value = "PROJECT_NAME",dataType = "String"), - @ApiImplicitParam(name = "desc", value = "PROJECT_DESC", dataType = "String") - }) - @PostMapping(value = "/update") - @ResponseStatus(HttpStatus.OK) - public Result updateProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("projectId") Integer projectId, - @RequestParam("projectName") String projectName, - @RequestParam(value = "desc", required = false) String desc) { - try { - logger.info("login user {} , updateProcessInstance project name: {}, desc: {}", loginUser.getUserName(), projectName, desc); - Map result = projectService.update(loginUser, projectId, projectName, desc); - return returnDataList(result); - } catch (Exception e) { - logger.error(UPDATE_PROJECT_ERROR.getMsg(), e); - return error(UPDATE_PROJECT_ERROR.getCode(), UPDATE_PROJECT_ERROR.getMsg()); - } - } - - /** - * query project details by id - * - * @param loginUser - * @param projectId - * @return - */ - @ApiOperation(value = "queryProjectById", notes= "QUERY_PROJECT_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", 
example = "100") - }) - @GetMapping(value = "/query-by-id") - @ResponseStatus(HttpStatus.OK) - public Result queryProjectById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("projectId") Integer projectId) { - logger.info("login user {}, query project by id: {}", loginUser.getUserName(), projectId); - - try { - Map result = projectService.queryById(projectId); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_PROJECT_DETAILS_BY_ID_ERROR.getMsg(), e); - return error(QUERY_PROJECT_DETAILS_BY_ID_ERROR.getCode(), QUERY_PROJECT_DETAILS_BY_ID_ERROR.getMsg()); - } - } - - /** - * query project list paging - * - * @param loginUser - * @param searchVal - * @param pageSize - * @param pageNo - * @return - */ - @ApiOperation(value = "queryProjectListPaging", notes= "QUERY_PROJECT_LIST_PAGING_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), - @ApiImplicitParam(name = "projectId", value = "PAGE_SIZE", dataType ="Int", example = "20"), - @ApiImplicitParam(name = "projectId", value = "PAGE_NO", dataType ="Int", example = "1") - }) - @GetMapping(value = "/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryProjectListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize, - @RequestParam("pageNo") Integer pageNo - ) { - - try { - logger.info("login user {}, query project list paging", loginUser.getUserName()); - searchVal = ParameterUtils.handleEscapes(searchVal); - Map result = projectService.queryProjectListPaging(loginUser, pageSize, pageNo, searchVal); - return returnDataListPaging(result); - } catch (Exception e) { - logger.error(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg(), e); - return error(Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getCode(), 
Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg()); - } - } - - /** - * delete project by id - * - * @param loginUser - * @param projectId - * @return - */ - @ApiOperation(value = "deleteProjectById", notes= "DELETE_PROJECT_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") - }) - @GetMapping(value = "/delete") - @ResponseStatus(HttpStatus.OK) - public Result deleteProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("projectId") Integer projectId - ) { - - try { - logger.info("login user {}, delete project: {}.", loginUser.getUserName(), projectId); - Map result = projectService.deleteProject(loginUser, projectId); - return returnDataList(result); - } catch (Exception e) { - logger.error(DELETE_PROJECT_ERROR.getMsg(), e); - return error(DELETE_PROJECT_ERROR.getCode(), DELETE_PROJECT_ERROR.getMsg()); - } - } - - /** - * query unauthorized project - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "queryUnauthorizedProject", notes= "QUERY_UNAUTHORIZED_PROJECT_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", dataType ="Int", example = "100") - }) - @GetMapping(value = "/unauth-project") - @ResponseStatus(HttpStatus.OK) - public Result queryUnauthorizedProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("userId") Integer userId) { - try { - logger.info("login user {}, query unauthorized project by user id: {}.", loginUser.getUserName(), userId); - Map result = projectService.queryUnauthorizedProject(loginUser, userId); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_UNAUTHORIZED_PROJECT_ERROR.getMsg(), e); - return error(QUERY_UNAUTHORIZED_PROJECT_ERROR.getCode(), QUERY_UNAUTHORIZED_PROJECT_ERROR.getMsg()); - } - } - - - /** - * query authorized project - * - * @param loginUser - * 
@param userId - * @return - */ - @ApiOperation(value = "queryAuthorizedProject", notes= "QUERY_AUTHORIZED_PROJECT_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", dataType ="Int", example = "100") - }) - @GetMapping(value = "/authed-project") - @ResponseStatus(HttpStatus.OK) - public Result queryAuthorizedProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("userId") Integer userId) { - try { - logger.info("login user {}, query authorized project by user id: {}.", loginUser.getUserName(), userId); - Map result = projectService.queryAuthorizedProject(loginUser, userId); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_AUTHORIZED_PROJECT.getMsg(), e); - return error(QUERY_AUTHORIZED_PROJECT.getCode(), QUERY_AUTHORIZED_PROJECT.getMsg()); - } - } - - /** - * import process definition - * - * @param loginUser - * @param file - * @return - */ - @ApiOperation(value = "importProcessDefinition", notes= "EXPORT_PROCCESS_DEFINITION_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") - }) - @PostMapping(value="/importProcessDefinition") - public Result importProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("file") MultipartFile file){ - try{ - logger.info("import process definition by id, login user:{}", - loginUser.getUserName()); - Map result = processDefinitionService.importProcessDefinition(loginUser,file); - return returnDataList(result); - }catch (Exception e){ - logger.error(IMPORT_PROCESS_DEFINE_ERROR.getMsg(),e); - return error(IMPORT_PROCESS_DEFINE_ERROR.getCode(), IMPORT_PROCESS_DEFINE_ERROR.getMsg()); - } - } - - /** - * query all project list - * @param loginUser - * @return - */ - @ApiOperation(value = "queryAllProjectList", notes= "QUERY_ALL_PROJECT_LIST_NOTES") - @GetMapping(value = 
"/queryAllProjectList") - @ResponseStatus(HttpStatus.OK) - public Result queryAllProjectList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - - try { - logger.info("login user {}, query all project list", loginUser.getUserName()); - Map result = projectService.queryAllProjectList(); - return returnDataList(result); - } catch (Exception e) { - logger.error(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg(), e); - return error(Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getCode(), Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg()); - } - } - - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/QueueController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/QueueController.java deleted file mode 100644 index 8023cd8030..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/QueueController.java +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.QueueService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; - - -/** - * queue controller - */ -@Api(tags = "QUEUE_TAG", position = 1) -@RestController -@RequestMapping("/queue") -public class QueueController extends BaseController{ - - private static final Logger logger = LoggerFactory.getLogger(QueueController.class); - - @Autowired - private QueueService queueService; - - - /** - * query queue list - * @param loginUser - * @return - */ - @ApiOperation(value = "queryList", notes= "QUERY_QUEUE_LIST_NOTES") - @GetMapping(value="/list") - @ResponseStatus(HttpStatus.OK) - public Result queryList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ - try{ - logger.info("login user {}, query queue list", loginUser.getUserName()); - Map result = queueService.queryList(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_QUEUE_LIST_ERROR.getMsg(),e); - return error(QUERY_QUEUE_LIST_ERROR.getCode(), QUERY_QUEUE_LIST_ERROR.getMsg()); - } - } - - /** - * query queue list paging - * @param loginUser - * @return - */ - @ApiOperation(value = "queryQueueListPaging", notes= "QUERY_QUEUE_LIST_PAGING_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name 
= "searchVal", value = "SEARCH_VAL", dataType ="String"), - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") - }) - @GetMapping(value="/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryQueueListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize){ - try{ - logger.info("login user {}, query queue list,search value:{}", loginUser.getUserName(),searchVal); - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - - searchVal = ParameterUtils.handleEscapes(searchVal); - result = queueService.queryList(loginUser,searchVal,pageNo,pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_QUEUE_LIST_ERROR.getMsg(),e); - return error(QUERY_QUEUE_LIST_ERROR.getCode(), QUERY_QUEUE_LIST_ERROR.getMsg()); - } - } - - /** - * create queue - * - * @param loginUser - * @param queue - * @param queueName - * @return - */ - @ApiOperation(value = "createQueue", notes= "CREATE_QUEUE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME", required = true,dataType ="String"), - @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String") - }) - @PostMapping(value = "/create") - @ResponseStatus(HttpStatus.CREATED) - public Result createQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "queue") String queue, - @RequestParam(value = "queueName") String queueName) { - logger.info("login user {}, create queue, queue: {}, queueName: {}", - loginUser.getUserName(), queue, queueName); - try { - Map result = 
queueService.createQueue(loginUser,queue,queueName); - return returnDataList(result); - - }catch (Exception e){ - logger.error(CREATE_QUEUE_ERROR.getMsg(),e); - return error(CREATE_QUEUE_ERROR.getCode(), CREATE_QUEUE_ERROR.getMsg()); - } - } - - /** - * update queue - * - * @param loginUser - * @param queue - * @param queueName - * @return - */ - @ApiOperation(value = "updateQueue", notes= "UPDATE_QUEUE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String") - }) - @PostMapping(value = "/update") - @ResponseStatus(HttpStatus.CREATED) - public Result updateQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id, - @RequestParam(value = "queue") String queue, - @RequestParam(value = "queueName") String queueName) { - logger.info("login user {}, update queue, id: {}, queue: {}, queueName: {}", - loginUser.getUserName(), id,queue, queueName); - try { - Map result = queueService.updateQueue(loginUser,id,queue,queueName); - return returnDataList(result); - - }catch (Exception e){ - logger.error(UPDATE_QUEUE_ERROR.getMsg(),e); - return error(UPDATE_QUEUE_ERROR.getCode(), UPDATE_QUEUE_ERROR.getMsg()); - } - } - - /** - * verify queue and queue name - * - * @param loginUser - * @param queue - * @param queueName - * @return - */ - @ApiOperation(value = "verifyQueue", notes= "VERIFY_QUEUE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String") - }) - 
@PostMapping(value = "/verify-queue") - @ResponseStatus(HttpStatus.OK) - public Result verifyQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="queue") String queue, - @RequestParam(value ="queueName") String queueName - ) { - - try{ - logger.info("login user {}, verfiy queue: {} queue name: {}", - loginUser.getUserName(),queue,queueName); - return queueService.verifyQueue(queue,queueName); - }catch (Exception e){ - logger.error(VERIFY_QUEUE_ERROR.getMsg(),e); - return error(Status.VERIFY_QUEUE_ERROR.getCode(), Status.VERIFY_QUEUE_ERROR.getMsg()); - } - } - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/ResourcesController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/ResourcesController.java deleted file mode 100644 index 578bd55277..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/ResourcesController.java +++ /dev/null @@ -1,715 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.ResourcesService; -import cn.escheduler.api.service.UdfFuncService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.ResourceType; -import cn.escheduler.common.enums.UdfType; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.core.io.Resource; -import org.springframework.http.HttpHeaders; -import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; -import org.springframework.web.bind.annotation.*; -import org.springframework.web.multipart.MultipartFile; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; - -/** - * resources controller - */ -@Api(tags = "RESOURCES_TAG", position = 1) -@RestController -@RequestMapping("resources") -public class ResourcesController extends BaseController{ - - private static final Logger logger = LoggerFactory.getLogger(ResourcesController.class); - - - @Autowired - private ResourcesService resourceService; - @Autowired - private UdfFuncService udfFuncService; - - /** - * create resource - * - * @param loginUser - * @param alias - * @param desc - * @param file - */ - @ApiOperation(value = "createResource", notes= "CREATE_RESOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), - @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = 
true, dataType ="String"), - @ApiImplicitParam(name = "des", value = "RESOURCE_DESC", dataType ="String"), - @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") - }) - @PostMapping(value = "/create") - public Result createResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "type") ResourceType type, - @RequestParam(value ="name")String alias, - @RequestParam(value = "desc", required = false) String desc, - @RequestParam("file") MultipartFile file) { - try { - logger.info("login user {}, create resource, type: {}, resource alias: {}, desc: {}, file: {},{}", - loginUser.getUserName(),type, alias, desc, file.getName(), file.getOriginalFilename()); - return resourceService.createResource(loginUser,alias, desc,type ,file); - } catch (Exception e) { - logger.error(CREATE_RESOURCE_ERROR.getMsg(),e); - return error(CREATE_RESOURCE_ERROR.getCode(), CREATE_RESOURCE_ERROR.getMsg()); - } - } - - /** - * update resource - * - * @param loginUser - * @param alias - * @param desc - */ - @ApiOperation(value = "createResource", notes= "CREATE_RESOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), - @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "des", value = "RESOURCE_DESC", dataType ="String"), - @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true,dataType = "MultipartFile") - }) - @PostMapping(value = "/update") - public Result updateResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="id") int resourceId, - @RequestParam(value = "type") ResourceType type, - @RequestParam(value ="name")String alias, - @RequestParam(value = 
"desc", required = false) String desc) { - try { - logger.info("login user {}, update resource, type: {}, resource alias: {}, desc: {}", - loginUser.getUserName(),type, alias, desc); - return resourceService.updateResource(loginUser,resourceId,alias, desc,type); - } catch (Exception e) { - logger.error(UPDATE_RESOURCE_ERROR.getMsg(),e); - return error(Status.UPDATE_RESOURCE_ERROR.getCode(), Status.UPDATE_RESOURCE_ERROR.getMsg()); - } - } - - /** - * query resources list - * - * @param loginUser - * @return - */ - @ApiOperation(value = "querytResourceList", notes= "QUERY_RESOURCE_LIST_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType") - }) - @GetMapping(value="/list") - @ResponseStatus(HttpStatus.OK) - public Result querytResourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="type") ResourceType type - ){ - try{ - logger.info("query resource list, login user:{}, resource type:{}", loginUser.getUserName(), type.toString()); - Map result = resourceService.queryResourceList(loginUser, type); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_RESOURCES_LIST_ERROR.getMsg(),e); - return error(Status.QUERY_RESOURCES_LIST_ERROR.getCode(), Status.QUERY_RESOURCES_LIST_ERROR.getMsg()); - } - } - - /** - * query resources list paging - * - * @param loginUser - * @param pageNo - * @param pageSize - * @return - */ - @ApiOperation(value = "querytResourceListPaging", notes= "QUERY_RESOURCE_LIST_PAGING_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") - }) - 
@GetMapping(value="/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result querytResourceListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="type") ResourceType type, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize - ){ - try{ - logger.info("query resource list, login user:{}, resource type:{}, search value:{}", - loginUser.getUserName(), type.toString(), searchVal); - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - - searchVal = ParameterUtils.handleEscapes(searchVal); - result = resourceService.queryResourceListPaging(loginUser,type,searchVal,pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_RESOURCES_LIST_PAGING.getMsg(),e); - return error(Status.QUERY_RESOURCES_LIST_PAGING.getCode(), Status.QUERY_RESOURCES_LIST_PAGING.getMsg()); - } - } - - - /** - * delete resource - * - * @param loginUser - * @param resourceId - */ - @ApiOperation(value = "deleteResource", notes= "DELETE_RESOURCE_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/delete") - @ResponseStatus(HttpStatus.OK) - public Result deleteResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="id") int resourceId - ) { - try{ - logger.info("login user {}, delete resource id: {}", - loginUser.getUserName(),resourceId); - return resourceService.delete(loginUser,resourceId); - }catch (Exception e){ - logger.error(DELETE_RESOURCE_ERROR.getMsg(),e); - return error(Status.DELETE_RESOURCE_ERROR.getCode(), Status.DELETE_RESOURCE_ERROR.getMsg()); - } - } - - - /** - * verify resource by alias and type - * - * @param 
loginUser - * @param alias - * @param type - * @return - */ - @ApiOperation(value = "verifyResourceName", notes= "VERIFY_RESOURCE_NAME_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), - @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType ="String") - }) - @GetMapping(value = "/verify-name") - @ResponseStatus(HttpStatus.OK) - public Result verifyResourceName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="name") String alias, - @RequestParam(value ="type") ResourceType type - ) { - try { - logger.info("login user {}, verfiy resource alias: {},resource type: {}", - loginUser.getUserName(), alias,type); - - return resourceService.verifyResourceName(alias,type,loginUser); - } catch (Exception e) { - logger.error(VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getMsg(), e); - return error(Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getCode(), Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getMsg()); - } - } - - /** - * view resource file online - * - * @param loginUser - * @param resourceId - */ - @ApiOperation(value = "viewResource", notes= "VIEW_RESOURCE_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/view") - public Result viewResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int resourceId, - @RequestParam(value = "skipLineNum") int skipLineNum, - @RequestParam(value = "limit") int limit - ) { - try{ - logger.info("login user {}, view resource : {}, skipLineNum {} , limit {}", - 
loginUser.getUserName(),resourceId,skipLineNum,limit); - - return resourceService.readResource(resourceId,skipLineNum,limit); - }catch (Exception e){ - logger.error(VIEW_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); - return error(Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); - } - } - - /** - * create resource file online - * - * @param loginUser - * @param type - * @param fileName - * @param fileSuffix - * @param desc - * @param content - * @return - */ - @ApiOperation(value = "onlineCreateResource", notes= "ONLINE_CREATE_RESOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), - @ApiImplicitParam(name = "fileName", value = "RESOURCE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "suffix", value = "SUFFIX", required = true, dataType ="String"), - @ApiImplicitParam(name = "des", value = "RESOURCE_DESC", dataType ="String"), - @ApiImplicitParam(name = "content", value = "CONTENT",required = true, dataType ="String") - }) - @PostMapping(value = "/online-create") - public Result onlineCreateResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "type") ResourceType type, - @RequestParam(value ="fileName")String fileName, - @RequestParam(value ="suffix")String fileSuffix, - @RequestParam(value = "desc", required = false) String desc, - @RequestParam(value = "content") String content - ) { - try{ - logger.info("login user {}, online create resource! 
fileName : {}, type : {}, suffix : {},desc : {},content : {}", - loginUser.getUserName(),type,fileName,fileSuffix,desc,content); - if(StringUtils.isEmpty(content)){ - logger.error("resource file contents are not allowed to be empty"); - return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); - } - return resourceService.onlineCreateResource(loginUser,type,fileName,fileSuffix,desc,content); - }catch (Exception e){ - logger.error(CREATE_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); - return error(Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); - } - } - - /** - * edit resource file online - * - * @param loginUser - * @param resourceId - */ - @ApiOperation(value = "updateResourceContent", notes= "UPDATE_RESOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "content", value = "CONTENT",required = true, dataType ="String") - }) - @PostMapping(value = "/update-content") - public Result updateResourceContent(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int resourceId, - @RequestParam(value = "content") String content - ) { - try{ - logger.info("login user {}, updateProcessInstance resource : {}", - loginUser.getUserName(),resourceId); - if(StringUtils.isEmpty(content)){ - logger.error("The resource file contents are not allowed to be empty"); - return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); - } - return resourceService.updateResourceContent(resourceId,content); - }catch (Exception e){ - logger.error(EDIT_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); - return error(Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); - } - } - - /** - * download resource file - * - * @param loginUser - * @param resourceId - */ - 
@ApiOperation(value = "downloadResource", notes= "DOWNLOAD_RESOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/download") - @ResponseBody - public ResponseEntity downloadResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int resourceId) { - try{ - logger.info("login user {}, download resource : {}", - loginUser.getUserName(), resourceId); - Resource file = resourceService.downloadResource(resourceId); - if (file == null) { - return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.RESOURCE_NOT_EXIST.getMsg()); - } - return ResponseEntity - .ok() - .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + file.getFilename() + "\"") - .body(file); - }catch (Exception e){ - logger.error(DOWNLOAD_RESOURCE_FILE_ERROR.getMsg(),e); - return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.DOWNLOAD_RESOURCE_FILE_ERROR.getMsg()); - } - } - - - /** - * create udf function - * @param loginUser - * @param type - * @param funcName - * @param argTypes - * @param database - * @param desc - * @param resourceId - * @return - */ - @ApiOperation(value = "createUdfFunc", notes= "CREATE_UDF_FUNCTION_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType ="UdfType"), - @ApiImplicitParam(name = "funcName", value = "FUNC_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "suffix", value = "CLASS_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType ="String"), - @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType ="String"), - @ApiImplicitParam(name = "desc", value = "UDF_DESC", dataType ="String"), - @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") - - }) - 
@PostMapping(value = "/udf-func/create") - @ResponseStatus(HttpStatus.CREATED) - public Result createUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "type") UdfType type, - @RequestParam(value ="funcName")String funcName, - @RequestParam(value ="className")String className, - @RequestParam(value ="argTypes", required = false)String argTypes, - @RequestParam(value ="database", required = false)String database, - @RequestParam(value = "desc", required = false) String desc, - @RequestParam(value = "resourceId") int resourceId) { - logger.info("login user {}, create udf function, type: {}, funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}", - loginUser.getUserName(),type, funcName, argTypes,database,desc, resourceId); - Result result = new Result(); - - try { - return udfFuncService.createUdfFunction(loginUser,funcName,className,argTypes,database,desc,type,resourceId); - } catch (Exception e) { - logger.error(CREATE_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.CREATE_UDF_FUNCTION_ERROR.getCode(), Status.CREATE_UDF_FUNCTION_ERROR.getMsg()); - } - } - - /** - * view udf function - * - * @param loginUser - * @param id - * @return - */ - @ApiOperation(value = "viewUIUdfFunction", notes= "VIEW_UDF_FUNCTION_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") - - }) - @GetMapping(value = "/udf-func/update-ui") - @ResponseStatus(HttpStatus.OK) - public Result viewUIUdfFunction(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("id") int id) - { - Result result = new Result(); - logger.info("login user {}, query udf{}", - loginUser.getUserName(), id); - try { - Map map = udfFuncService.queryUdfFuncDetail(id); - return returnDataList(map); - } catch (Exception e) { - logger.error(VIEW_UDF_FUNCTION_ERROR.getMsg(),e); - return 
error(Status.VIEW_UDF_FUNCTION_ERROR.getCode(), Status.VIEW_UDF_FUNCTION_ERROR.getMsg()); - } - } - - /** - * updateProcessInstance udf function - * - * @param loginUser - * @param type - * @param funcName - * @param argTypes - * @param database - * @param desc - * @param resourceId - * @return - */ - @ApiOperation(value = "updateUdfFunc", notes= "UPDATE_UDF_FUNCTION_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType ="UdfType"), - @ApiImplicitParam(name = "funcName", value = "FUNC_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "suffix", value = "CLASS_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType ="String"), - @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType ="String"), - @ApiImplicitParam(name = "desc", value = "UDF_DESC", dataType ="String"), - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") - - }) - @PostMapping(value = "/udf-func/update") - public Result updateUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int udfFuncId, - @RequestParam(value = "type") UdfType type, - @RequestParam(value ="funcName")String funcName, - @RequestParam(value ="className")String className, - @RequestParam(value ="argTypes", required = false)String argTypes, - @RequestParam(value ="database", required = false)String database, - @RequestParam(value = "desc", required = false) String desc, - @RequestParam(value = "resourceId") int resourceId) { - try { - logger.info("login user {}, updateProcessInstance udf function id: {},type: {}, funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}", - loginUser.getUserName(),udfFuncId,type, funcName, argTypes,database,desc, resourceId); - Map result = 
udfFuncService.updateUdfFunc(udfFuncId,funcName,className,argTypes,database,desc,type,resourceId); - return returnDataList(result); - } catch (Exception e) { - logger.error(UPDATE_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.UPDATE_UDF_FUNCTION_ERROR.getCode(), Status.UPDATE_UDF_FUNCTION_ERROR.getMsg()); - } - } - - /** - * query udf function list paging - * - * @param loginUser - * @param pageNo - * @param pageSize - * @return - */ - @ApiOperation(value = "queryUdfFuncListPaging", notes= "QUERY_UDF_FUNCTION_LIST_PAGING_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") - }) - @GetMapping(value="/udf-func/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize - ){ - try{ - logger.info("query udf functions list, login user:{},search value:{}", - loginUser.getUserName(), searchVal); - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - - result = udfFuncService.queryUdfFuncListPaging(loginUser,searchVal,pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getCode(), Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getMsg()); - } - } - - /** - * query resource list by type - * - * @param loginUser - * @return - */ - @ApiOperation(value = "queryResourceList", notes= "QUERY_RESOURCE_LIST_NOTES") - @ApiImplicitParams({ - 
@ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType ="UdfType") - }) - @GetMapping(value="/udf-func/list") - @ResponseStatus(HttpStatus.OK) - public Result queryResourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("type") UdfType type){ - try{ - logger.info("query datasource list, user:{}, type:{}", loginUser.getUserName(), type.toString()); - Map result = udfFuncService.queryResourceList(loginUser,type.ordinal()); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_DATASOURCE_BY_TYPE_ERROR.getMsg(),e); - return error(Status.QUERY_DATASOURCE_BY_TYPE_ERROR.getCode(),QUERY_DATASOURCE_BY_TYPE_ERROR.getMsg()); - } - } - - /** - * verify udf function name can use or not - * - * @param loginUser - * @param name - * @return - */ - @ApiOperation(value = "verifyUdfFuncName", notes= "VERIFY_UDF_FUNCTION_NAME_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "FUNC_NAME",required = true, dataType ="String") - - }) - @GetMapping(value = "/udf-func/verify-name") - @ResponseStatus(HttpStatus.OK) - public Result verifyUdfFuncName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="name") String name - ) { - logger.info("login user {}, verfiy udf function name: {}", - loginUser.getUserName(),name); - - try{ - - return udfFuncService.verifyUdfFuncByName(name); - }catch (Exception e){ - logger.error(VERIFY_UDF_FUNCTION_NAME_ERROR.getMsg(),e); - return error(Status.VERIFY_UDF_FUNCTION_NAME_ERROR.getCode(), Status.VERIFY_UDF_FUNCTION_NAME_ERROR.getMsg()); - } - } - - /** - * delete udf function - * - * @param loginUser - * @param udfFuncId - */ - @ApiOperation(value = "deleteUdfFunc", notes= "DELETE_UDF_FUNCTION_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/udf-func/delete") - 
@ResponseStatus(HttpStatus.OK) - public Result deleteUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="id") int udfFuncId - ) { - try{ - - logger.info("login user {}, delete udf function id: {}", loginUser.getUserName(),udfFuncId); - return udfFuncService.delete(udfFuncId); - }catch (Exception e){ - logger.error(DELETE_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.DELETE_UDF_FUNCTION_ERROR.getCode(), Status.DELETE_UDF_FUNCTION_ERROR.getMsg()); - } - } - - /** - * authorized file resource list - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "authorizedFile", notes= "AUTHORIZED_FILE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/authed-file") - @ResponseStatus(HttpStatus.CREATED) - public Result authorizedFile(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("userId") Integer userId) { - try{ - logger.info("authorized file resource, user: {}, user id:{}", loginUser.getUserName(), userId); - Map result = resourceService.authorizedFile(loginUser, userId); - return returnDataList(result); - }catch (Exception e){ - logger.error(AUTHORIZED_FILE_RESOURCE_ERROR.getMsg(),e); - return error(Status.AUTHORIZED_FILE_RESOURCE_ERROR.getCode(), Status.AUTHORIZED_FILE_RESOURCE_ERROR.getMsg()); - } - } - - - /** - * unauthorized file resource list - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "unauthorizedFile", notes= "UNAUTHORIZED_FILE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/unauth-file") - @ResponseStatus(HttpStatus.CREATED) - public Result unauthorizedFile(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - 
@RequestParam("userId") Integer userId) { - try{ - logger.info("resource unauthorized file, user:{}, unauthorized user id:{}", loginUser.getUserName(), userId); - Map result = resourceService.unauthorizedFile(loginUser, userId); - return returnDataList(result); - }catch (Exception e){ - logger.error(UNAUTHORIZED_FILE_RESOURCE_ERROR.getMsg(),e); - return error(Status.UNAUTHORIZED_FILE_RESOURCE_ERROR.getCode(), Status.UNAUTHORIZED_FILE_RESOURCE_ERROR.getMsg()); - } - } - - - /** - * unauthorized udf function - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "unauthUDFFunc", notes= "UNAUTHORIZED_UDF_FUNC_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/unauth-udf-func") - @ResponseStatus(HttpStatus.CREATED) - public Result unauthUDFFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("userId") Integer userId) { - try{ - logger.info("unauthorized udf function, login user:{}, unauthorized user id:{}", loginUser.getUserName(), userId); - - Map result = resourceService.unauthorizedUDFFunction(loginUser, userId); - return returnDataList(result); - }catch (Exception e){ - logger.error(UNAUTHORIZED_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.UNAUTHORIZED_UDF_FUNCTION_ERROR.getCode(), Status.UNAUTHORIZED_UDF_FUNCTION_ERROR.getMsg()); - } - } - - - /** - * authorized udf function - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "authUDFFunc", notes= "AUTHORIZED_UDF_FUNC_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") - }) - @GetMapping(value = "/authed-udf-func") - @ResponseStatus(HttpStatus.CREATED) - public Result authorizedUDFFunction(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("userId") Integer 
userId) { - try{ - logger.info("auth udf function, login user:{}, auth user id:{}", loginUser.getUserName(), userId); - Map result = resourceService.authorizedUDFFunction(loginUser, userId); - return returnDataList(result); - }catch (Exception e){ - logger.error(AUTHORIZED_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.AUTHORIZED_UDF_FUNCTION_ERROR.getCode(), Status.AUTHORIZED_UDF_FUNCTION_ERROR.getMsg()); - } - } -} \ No newline at end of file diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java deleted file mode 100644 index de3aea29dd..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java +++ /dev/null @@ -1,336 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.SchedulerService; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.FailureStrategy; -import cn.escheduler.common.enums.Priority; -import cn.escheduler.common.enums.ReleaseState; -import cn.escheduler.common.enums.WarningType; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; -import static cn.escheduler.api.utils.Constants.SESSION_USER; - -/** - * schedule controller - */ -@Api(tags = "SCHEDULER_TAG", position = 13) -@RestController -@RequestMapping("/projects/{projectName}/schedule") -public class SchedulerController extends BaseController { - - private static final Logger logger = LoggerFactory.getLogger(SchedulerController.class); - public static final String DEFAULT_WARNING_TYPE = "NONE"; - public static final String DEFAULT_NOTIFY_GROUP_ID = "1"; - public static final String DEFAULT_FAILURE_POLICY = "CONTINUE"; - - - @Autowired - private SchedulerService schedulerService; - - - /** - * create schedule - * - * @param loginUser - * @param projectName - * @param processDefinitionId - * @param schedule - * @param warningType - * @param warningGroupId - * @param failureStrategy - * @return - */ - @ApiOperation(value = "createSchedule", notes= "CREATE_SCHEDULE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "Int", 
example = "100"), - @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type ="WarningType"), - @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type ="FailureStrategy"), - @ApiImplicitParam(name = "receivers", value = "RECEIVERS", type ="String"), - @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type ="String"), - @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type ="Priority"), - }) - @PostMapping("/create") - @ResponseStatus(HttpStatus.CREATED) - public Result createSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "processDefinitionId") Integer processDefinitionId, - @RequestParam(value = "schedule") String schedule, - @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, - @RequestParam(value = "warningGroupId", required = false, defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId, - @RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy, - @RequestParam(value = "receivers", required = false) String receivers, - @RequestParam(value = "receiversCc", required = false) String receiversCc, - @RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId, - @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { - logger.info("login user {}, project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," + - "failure policy: {},receivers : 
{},receiversCc : {},processInstancePriority : {}, workGroupId:{}", - loginUser.getUserName(), projectName, processDefinitionId, schedule, warningType, warningGroupId, - failureStrategy, receivers, receiversCc, processInstancePriority, workerGroupId); - try { - Map result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule, - warningType, warningGroupId, failureStrategy, receivers, receiversCc, processInstancePriority, workerGroupId); - - return returnDataList(result); - } catch (Exception e) { - logger.error(CREATE_SCHEDULE_ERROR.getMsg(), e); - return error(CREATE_SCHEDULE_ERROR.getCode(), CREATE_SCHEDULE_ERROR.getMsg()); - } - } - - /** - * updateProcessInstance schedule - * - * @param loginUser - * @param projectName - * @param id - * @param schedule - * @param warningType - * @param warningGroupId - * @param failureStrategy - * @return - */ - @ApiOperation(value = "updateSchedule", notes= "UPDATE_SCHEDULE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type ="WarningType"), - @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type ="FailureStrategy"), - @ApiImplicitParam(name = "receivers", value = "RECEIVERS", type ="String"), - @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type ="String"), - @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type ="Priority"), - }) - @PostMapping("/update") - public Result updateSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - 
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "id") Integer id, - @RequestParam(value = "schedule") String schedule, - @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, - @RequestParam(value = "warningGroupId", required = false) int warningGroupId, - @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, - @RequestParam(value = "receivers", required = false) String receivers, - @RequestParam(value = "receiversCc", required = false) String receiversCc, - @RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId, - @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { - logger.info("login user {}, project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " + - "failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {},workerGroupId:{}", - loginUser.getUserName(), projectName, id, schedule, warningType, warningGroupId, failureStrategy, - receivers, receiversCc, processInstancePriority, workerGroupId); - - try { - Map result = schedulerService.updateSchedule(loginUser, projectName, id, schedule, - warningType, warningGroupId, failureStrategy, receivers, receiversCc, null, processInstancePriority, workerGroupId); - return returnDataList(result); - - } catch (Exception e) { - logger.error(UPDATE_SCHEDULE_ERROR.getMsg(), e); - return error(Status.UPDATE_SCHEDULE_ERROR.getCode(), Status.UPDATE_SCHEDULE_ERROR.getMsg()); - } - } - - /** - * publish schedule setScheduleState - * - * @param loginUser - * @param projectName - * @param id - * @return - * @throws Exception - */ - @ApiOperation(value = "online", notes= "ONLINE_SCHEDULE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required 
= true, dataType = "Int", example = "100") - }) - @PostMapping("/online") - public Result online(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName, - @RequestParam("id") Integer id) { - logger.info("login user {}, schedule setScheduleState, project name: {}, id: {}", - loginUser.getUserName(), projectName, id); - try { - Map result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.ONLINE); - return returnDataList(result); - - } catch (Exception e) { - logger.error(PUBLISH_SCHEDULE_ONLINE_ERROR.getMsg(), e); - return error(Status.PUBLISH_SCHEDULE_ONLINE_ERROR.getCode(), Status.PUBLISH_SCHEDULE_ONLINE_ERROR.getMsg()); - } - } - - /** - * offline schedule - * - * @param loginUser - * @param projectName - * @param id - * @return - */ - @ApiOperation(value = "offline", notes= "OFFLINE_SCHEDULE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") - }) - @PostMapping("/offline") - public Result offline(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName, - @RequestParam("id") Integer id) { - logger.info("login user {}, schedule offline, project name: {}, process definition id: {}", - loginUser.getUserName(), projectName, id); - - try { - Map result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.OFFLINE); - return returnDataList(result); - - } catch (Exception e) { - logger.error(OFFLINE_SCHEDULE_ERROR.getMsg(), e); - return error(Status.OFFLINE_SCHEDULE_ERROR.getCode(), Status.OFFLINE_SCHEDULE_ERROR.getMsg()); - } - } - - /** - * query schedule list paging - * - * @param loginUser - * @param projectName - * @param processDefinitionId - * @return - */ - 
@ApiOperation(value = "queryScheduleListPaging", notes= "QUERY_SCHEDULE_LIST_PAGING_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true,dataType = "Int", example = "100"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100") - - }) - @GetMapping("/list-paging") - public Result queryScheduleListPaging(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam Integer processDefinitionId, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageNo") Integer pageNo, - @RequestParam("pageSize") Integer pageSize) { - logger.info("login user {}, query schedule, project name: {}, process definition id: {}", - loginUser.getUserName(), projectName, processDefinitionId); - try { - searchVal = ParameterUtils.handleEscapes(searchVal); - Map result = schedulerService.querySchedule(loginUser, projectName, processDefinitionId, searchVal, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_SCHEDULE_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_SCHEDULE_LIST_PAGING_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_PAGING_ERROR.getMsg()); - } - - } - - /** - * delete schedule by id - * - * @param loginUser - * @param projectName - * @param scheduleId - * @return - */ - @ApiOperation(value = "deleteScheduleById", notes= "OFFLINE_SCHEDULE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "scheduleId", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") - }) - @GetMapping(value="/delete") - @ResponseStatus(HttpStatus.OK) - public Result 
deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser, - @PathVariable String projectName, - @RequestParam("scheduleId") Integer scheduleId - ){ - try{ - logger.info("delete schedule by id, login user:{}, project name:{}, schedule id:{}", - loginUser.getUserName(), projectName, scheduleId); - Map result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId); - return returnDataList(result); - }catch (Exception e){ - logger.error(DELETE_SCHEDULE_CRON_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR.getCode(), Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR.getMsg()); - } - } - /** - * query schedule list - * - * @param loginUser - * @param projectName - * @return - */ - @ApiOperation(value = "queryScheduleList", notes= "QUERY_SCHEDULE_LIST_NOTES") - @PostMapping("/list") - public Result queryScheduleList(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName) { - try { - logger.info("login user {}, query schedule list, project name: {}", - loginUser.getUserName(), projectName); - Map result = schedulerService.queryScheduleList(loginUser, projectName); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_SCHEDULE_LIST_ERROR.getMsg(), e); - return error(Status.QUERY_SCHEDULE_LIST_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_ERROR.getMsg()); - } - } - - /** - * preview schedule - * - * @param loginUser - * @param projectName - * @param schedule - * @return - */ - @ApiOperation(value = "previewSchedule", notes= "PREVIEW_SCHEDULE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? 
*'}"), - }) - @PostMapping("/preview") - @ResponseStatus(HttpStatus.CREATED) - public Result previewSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "schedule") String schedule - ){ - logger.info("login user {}, project name: {}, preview schedule: {}", - loginUser.getUserName(), projectName, schedule); - try { - Map result = schedulerService.previewSchedule(loginUser, projectName, schedule); - return returnDataList(result); - } catch (Exception e) { - logger.error(PREVIEW_SCHEDULE_ERROR.getMsg(), e); - return error(PREVIEW_SCHEDULE_ERROR.getCode(), PREVIEW_SCHEDULE_ERROR.getMsg()); - } - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/TaskInstanceController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/TaskInstanceController.java deleted file mode 100644 index 22a171fa2e..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/TaskInstanceController.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.service.TaskInstanceService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.QUERY_TASK_LIST_PAGING_ERROR; - -/** - * task instance controller - */ -@Api(tags = "TASK_INSTANCE_TAG", position = 11) -@RestController -@RequestMapping("/projects/{projectName}/task-instance") -public class TaskInstanceController extends BaseController{ - - private static final Logger logger = LoggerFactory.getLogger(TaskInstanceController.class); - - @Autowired - TaskInstanceService taskInstanceService; - - - /** - * query task list paging - * - * @param loginUser - * @return - */ - @ApiOperation(value = "queryTaskListPaging", notes= "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID",required = false, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"), - @ApiImplicitParam(name = "taskName", value = "TASK_NAME", type ="String"), - @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type ="ExecutionStatus"), - @ApiImplicitParam(name = "host", value = "HOST", type ="String"), - @ApiImplicitParam(name = "startDate", value = "START_DATE", type ="String"), - @ApiImplicitParam(name = "endDate", value = "END_DATE", type ="String"), - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", 
example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") - }) - @GetMapping("/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryTaskListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "processInstanceId", required = false, defaultValue = "0") Integer processInstanceId, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam(value = "taskName", required = false) String taskName, - @RequestParam(value = "stateType", required = false) ExecutionStatus stateType, - @RequestParam(value = "host", required = false) String host, - @RequestParam(value = "startDate", required = false) String startTime, - @RequestParam(value = "endDate", required = false) String endTime, - @RequestParam("pageNo") Integer pageNo, - @RequestParam("pageSize") Integer pageSize){ - - try{ - logger.info("query task instance list, project name:{},process instance:{}, search value:{},task name:{}, state type:{}, host:{}, start:{}, end:{}", - projectName, processInstanceId, searchVal, taskName, stateType, host, startTime, endTime); - searchVal = ParameterUtils.handleEscapes(searchVal); - Map result = taskInstanceService.queryTaskListPaging( - loginUser, projectName, processInstanceId, taskName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_TASK_LIST_PAGING_ERROR.getMsg(),e); - return error(QUERY_TASK_LIST_PAGING_ERROR.getCode(), QUERY_TASK_LIST_PAGING_ERROR.getMsg()); - } - - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/TaskRecordController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/TaskRecordController.java deleted file mode 100644 index 681d19316a..0000000000 --- 
a/escheduler-api/src/main/java/cn/escheduler/api/controller/TaskRecordController.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.service.TaskRecordService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.dao.entity.User; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR; - -/** - * data quality controller - */ -@ApiIgnore -@RestController -@RequestMapping("/projects/task-record") -public class TaskRecordController extends BaseController{ - - - private static final Logger logger = LoggerFactory.getLogger(TaskRecordController.class); - - - @Autowired - TaskRecordService taskRecordService; - - /** - * query task record list paging - * - * @param loginUser - * @return - */ - @GetMapping("/list-paging") - 
@ResponseStatus(HttpStatus.OK) - public Result queryTaskRecordListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "taskName", required = false) String taskName, - @RequestParam(value = "state", required = false) String state, - @RequestParam(value = "sourceTable", required = false) String sourceTable, - @RequestParam(value = "destTable", required = false) String destTable, - @RequestParam(value = "taskDate", required = false) String taskDate, - @RequestParam(value = "startDate", required = false) String startTime, - @RequestParam(value = "endDate", required = false) String endTime, - @RequestParam("pageNo") Integer pageNo, - @RequestParam("pageSize") Integer pageSize - ){ - - try{ - logger.info("query task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}", - taskName, state, taskDate, startTime, endTime); - Map result = taskRecordService.queryTaskRecordListPaging(false, taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e); - return error(QUERY_TASK_RECORD_LIST_PAGING_ERROR.getCode(), QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg()); - } - - } - - /** - * query history task record list paging - * - * @param loginUser - * @return - */ - @GetMapping("/history-list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryHistoryTaskRecordListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "taskName", required = false) String taskName, - @RequestParam(value = "state", required = false) String state, - @RequestParam(value = "sourceTable", required = false) String sourceTable, - @RequestParam(value = "destTable", required = false) String destTable, - @RequestParam(value = "taskDate", required = false) String taskDate, - @RequestParam(value = "startDate", required = false) 
String startTime, - @RequestParam(value = "endDate", required = false) String endTime, - @RequestParam("pageNo") Integer pageNo, - @RequestParam("pageSize") Integer pageSize - ){ - - try{ - logger.info("query hisotry task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}", - taskName, state, taskDate, startTime, endTime); - Map result = taskRecordService.queryTaskRecordListPaging(true, taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e); - return error(QUERY_TASK_RECORD_LIST_PAGING_ERROR.getCode(), QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg()); - } - - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/TenantController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/TenantController.java deleted file mode 100644 index afd469d45e..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/TenantController.java +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.TenantService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; - - -/** - * tenant controller - */ -@Api(tags = "TENANT_TAG", position = 1) -@RestController -@RequestMapping("/tenant") -public class TenantController extends BaseController{ - - private static final Logger logger = LoggerFactory.getLogger(TenantController.class); - - - @Autowired - private TenantService tenantService; - - /** - * create tenant - * - * @param loginUser - * @param tenantCode - * @param tenantName - * @param queueId - * @param desc - * @return - */ - @ApiOperation(value = "createTenant", notes= "CREATE_TENANT_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String"), - @ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType ="Int",example = "100"), - @ApiImplicitParam(name = "desc", value = "TENANT_DESC", dataType ="String") - - }) - @PostMapping(value = "/create") - @ResponseStatus(HttpStatus.CREATED) - public Result createTenant(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - 
@RequestParam(value = "tenantCode") String tenantCode, - @RequestParam(value = "tenantName") String tenantName, - @RequestParam(value = "queueId") int queueId, - @RequestParam(value = "desc",required = false) String desc) { - logger.info("login user {}, create tenant, tenantCode: {}, tenantName: {}, queueId: {}, desc: {}", - loginUser.getUserName(), tenantCode, tenantName, queueId,desc); - try { - Map result = tenantService.createTenant(loginUser,tenantCode,tenantName,queueId,desc); - return returnDataList(result); - - }catch (Exception e){ - logger.error(CREATE_TENANT_ERROR.getMsg(),e); - return error(CREATE_TENANT_ERROR.getCode(), CREATE_TENANT_ERROR.getMsg()); - } - } - - - /** - * query tenant list paging - * - * @param loginUser - * @param pageNo - * @param searchVal - * @param pageSize - * @return - */ - @ApiOperation(value = "queryTenantlistPaging", notes= "QUERY_TENANT_LIST_PAGING_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") - }) - @GetMapping(value="/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryTenantlistPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize){ - logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}", - loginUser.getUserName(),pageNo,searchVal,pageSize); - try{ - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - searchVal = ParameterUtils.handleEscapes(searchVal); - result = tenantService.queryTenantList(loginUser, searchVal, pageNo, pageSize); - return 
returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_TENANT_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_TENANT_LIST_PAGING_ERROR.getCode(), Status.QUERY_TENANT_LIST_PAGING_ERROR.getMsg()); - } - } - - - /** - * tenant list - * - * @param loginUser - * @return - */ - @ApiOperation(value = "queryTenantlist", notes= "QUERY_TENANT_LIST_NOTES") - @GetMapping(value="/list") - @ResponseStatus(HttpStatus.OK) - public Result queryTenantlist(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ - logger.info("login user {}, query tenant list"); - try{ - Map result = tenantService.queryTenantList(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_TENANT_LIST_ERROR.getMsg(),e); - return error(Status.QUERY_TENANT_LIST_ERROR.getCode(), Status.QUERY_TENANT_LIST_ERROR.getMsg()); - } - } - - - - /** - * udpate tenant - * - * @param loginUser - * @param tenantCode - * @param tenantName - * @param queueId - * @param desc - * @return - */ - @ApiOperation(value = "updateTenant", notes= "UPDATE_TENANT_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String"), - @ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "desc", value = "TENANT_DESC", type ="String") - - }) - @PostMapping(value = "/update") - @ResponseStatus(HttpStatus.OK) - public Result updateTenant(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id, - @RequestParam(value = "tenantCode") String tenantCode, - @RequestParam(value = "tenantName") String tenantName, - @RequestParam(value = "queueId") int 
queueId, - @RequestParam(value = "desc",required = false) String desc) { - logger.info("login user {}, updateProcessInstance tenant, tenantCode: {}, tenantName: {}, queueId: {}, desc: {}", - loginUser.getUserName(), tenantCode, tenantName, queueId,desc); - try { - Map result = tenantService.updateTenant(loginUser,id,tenantCode, tenantName, queueId, desc); - return returnDataList(result); - }catch (Exception e){ - logger.error(UPDATE_TENANT_ERROR.getMsg(),e); - return error(Status.UPDATE_TENANT_ERROR.getCode(),UPDATE_TENANT_ERROR.getMsg()); - } - } - - /** - * delete tenant by id - * - * @param loginUser - * @param id - * @return - */ - @ApiOperation(value = "deleteTenantById", notes= "DELETE_TENANT_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType ="Int", example = "100") - - }) - @PostMapping(value = "/delete") - @ResponseStatus(HttpStatus.OK) - public Result deleteTenantById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id) { - logger.info("login user {}, delete tenant, tenantCode: {},", loginUser.getUserName(), id); - try { - Map result = tenantService.deleteTenantById(loginUser,id); - return returnDataList(result); - }catch (Exception e){ - logger.error(DELETE_TENANT_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_TENANT_BY_ID_ERROR.getCode(), Status.DELETE_TENANT_BY_ID_ERROR.getMsg()); - } - } - - - /** - * verify tenant code - * - * @param loginUser - * @param tenantCode - * @return - */ - @ApiOperation(value = "verifyTenantCode", notes= "VERIFY_TENANT_CODE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String") - }) - @GetMapping(value = "/verify-tenant-code") - @ResponseStatus(HttpStatus.OK) - public Result verifyTenantCode(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="tenantCode") String 
tenantCode - ) { - - try{ - logger.info("login user {}, verfiy tenant code: {}", - loginUser.getUserName(),tenantCode); - return tenantService.verifyTenantCode(tenantCode); - }catch (Exception e){ - logger.error(VERIFY_TENANT_CODE_ERROR.getMsg(),e); - return error(Status.VERIFY_TENANT_CODE_ERROR.getCode(), Status.VERIFY_TENANT_CODE_ERROR.getMsg()); - } - } - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/UsersController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/UsersController.java deleted file mode 100644 index 4b9bd08c74..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/UsersController.java +++ /dev/null @@ -1,456 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.UsersService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.enums.Status.*; - - -/** - * user controller - */ -@Api(tags = "USERS_TAG" , position = 14) -@RestController -@RequestMapping("/users") -public class UsersController extends BaseController{ - - private static final Logger logger = LoggerFactory.getLogger(UsersController.class); - - @Autowired - private UsersService usersService; - - /** - * create user - * - * @param loginUser - * @param userName - * @param userPassword - * @param email - * @param tenantId - * @param phone - * @return - */ - @ApiOperation(value = "createUser", notes= "CREATE_USER_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String"), - @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type ="String"), - @ApiImplicitParam(name = "tenantId", value = "TENANT_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "email", value = "EMAIL", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "phone", value = "PHONE", dataType = "Int", example = "100") - }) - @PostMapping(value = "/create") - 
@ResponseStatus(HttpStatus.CREATED) - public Result createUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userName") String userName, - @RequestParam(value = "userPassword") String userPassword, - @RequestParam(value = "tenantId") int tenantId, - @RequestParam(value = "queue",required = false,defaultValue = "") String queue, - @RequestParam(value = "email") String email, - @RequestParam(value = "phone", required = false) String phone) { - logger.info("login user {}, create user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}", - loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone,queue); - - try { - Map result = usersService.createUser(loginUser, userName, userPassword,email,tenantId, phone,queue); - return returnDataList(result); - }catch (Exception e){ - logger.error(CREATE_USER_ERROR.getMsg(),e); - return error(CREATE_USER_ERROR.getCode(), CREATE_USER_ERROR.getMsg()); - } - } - - /** - * query user list paging - * - * @param loginUser - * @param pageNo - * @param searchVal - * @param pageSize - * @return - */ - @ApiOperation(value = "queryUserList", notes= "QUERY_USER_LIST_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", type ="String"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String") - }) - @GetMapping(value="/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryUserList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize){ - logger.info("login user {}, list user paging, pageNo: {}, searchVal: {}, pageSize: {}", - loginUser.getUserName(),pageNo,searchVal,pageSize); - try{ - Map 
result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - searchVal = ParameterUtils.handleEscapes(searchVal); - result = usersService.queryUserList(loginUser, searchVal, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_USER_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_USER_LIST_PAGING_ERROR.getCode(), Status.QUERY_USER_LIST_PAGING_ERROR.getMsg()); - } - } - - - /** - * update user - * - * @param loginUser - * @param id - * @param userName - * @param userPassword - * @param email - * @param tenantId - * @param phone - * @return - */ - @ApiOperation(value = "updateUser", notes= "UPDATE_USER_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "USER_ID",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String"), - @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type ="String"), - @ApiImplicitParam(name = "tenantId", value = "TENANT_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "email", value = "EMAIL", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "phone", value = "PHONE", dataType = "Int", example = "100") - }) - @PostMapping(value = "/update") - @ResponseStatus(HttpStatus.OK) - public Result updateUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id, - @RequestParam(value = "userName") String userName, - @RequestParam(value = "userPassword") String userPassword, - @RequestParam(value = "queue",required = false,defaultValue = "") String queue, - @RequestParam(value = "email") String email, - @RequestParam(value = "tenantId") int tenantId, - @RequestParam(value = "phone", required = false) String phone) { - logger.info("login user {}, 
updateProcessInstance user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}", - loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone,queue); - try { - Map result = usersService.updateUser(id, userName, userPassword, email, tenantId, phone, queue); - return returnDataList(result); - }catch (Exception e){ - logger.error(UPDATE_USER_ERROR.getMsg(),e); - return error(Status.UPDATE_USER_ERROR.getCode(), Status.UPDATE_USER_ERROR.getMsg()); - } - } - - /** - * delete user by id - * @param loginUser - * @param id - * @return - */ - @ApiOperation(value = "delUserById", notes= "DELETE_USER_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "USER_ID",dataType = "Int", example = "100") - }) - @PostMapping(value = "/delete") - @ResponseStatus(HttpStatus.OK) - public Result delUserById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id) { - logger.info("login user {}, delete user, userId: {},", loginUser.getUserName(), id); - try { - Map result = usersService.deleteUserById(loginUser, id); - return returnDataList(result); - }catch (Exception e){ - logger.error(DELETE_USER_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_USER_BY_ID_ERROR.getCode(), Status.DELETE_USER_BY_ID_ERROR.getMsg()); - } - } - - /** - * grant project - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "grantProject", notes= "GRANT_PROJECT_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "projectIds", value = "PROJECT_IDS",type = "String") - }) - @PostMapping(value = "/grant-project") - @ResponseStatus(HttpStatus.OK) - public Result grantProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "projectIds") String 
projectIds) { - logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(), userId,projectIds); - try { - Map result = usersService.grantProject(loginUser, userId, projectIds); - return returnDataList(result); - }catch (Exception e){ - logger.error(GRANT_PROJECT_ERROR.getMsg(),e); - return error(Status.GRANT_PROJECT_ERROR.getCode(), Status.GRANT_PROJECT_ERROR.getMsg()); - } - } - - /** - * grant resource - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "grantResource", notes= "GRANT_RESOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "resourceIds", value = "RESOURCE_IDS",type = "String") - }) - @PostMapping(value = "/grant-file") - @ResponseStatus(HttpStatus.OK) - public Result grantResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "resourceIds") String resourceIds) { - logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId,resourceIds); - try { - Map result = usersService.grantResources(loginUser, userId, resourceIds); - return returnDataList(result); - }catch (Exception e){ - logger.error(GRANT_RESOURCE_ERROR.getMsg(),e); - return error(Status.GRANT_RESOURCE_ERROR.getCode(), Status.GRANT_RESOURCE_ERROR.getMsg()); - } - } - - - /** - * grant udf function - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "grantUDFFunc", notes= "GRANT_UDF_FUNC_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "udfIds", value = "UDF_IDS",type = "String") - }) - @PostMapping(value = "/grant-udf-func") - @ResponseStatus(HttpStatus.OK) - public Result grantUDFFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User 
loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "udfIds") String udfIds) { - logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId,udfIds); - try { - Map result = usersService.grantUDFFunction(loginUser, userId, udfIds); - return returnDataList(result); - }catch (Exception e){ - logger.error(GRANT_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.GRANT_UDF_FUNCTION_ERROR.getCode(), Status.GRANT_UDF_FUNCTION_ERROR.getMsg()); - } - } - - - - /** - * grant datasource - * - * @param loginUser - * @param userId - * @return - */ - @ApiOperation(value = "grantDataSource", notes= "GRANT_DATASOURCE_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "datasourceIds", value = "DATASOURCE_IDS",type = "String") - }) - @PostMapping(value = "/grant-datasource") - @ResponseStatus(HttpStatus.OK) - public Result grantDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "datasourceIds") String datasourceIds) { - logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(),userId,datasourceIds); - try { - Map result = usersService.grantDataSource(loginUser, userId, datasourceIds); - return returnDataList(result); - }catch (Exception e){ - logger.error(GRANT_DATASOURCE_ERROR.getMsg(),e); - return error(Status.GRANT_DATASOURCE_ERROR.getCode(), Status.GRANT_DATASOURCE_ERROR.getMsg()); - } - } - - - /** - * get user info - * - * @param loginUser - * @return - */ - @ApiOperation(value = "getUserInfo", notes= "GET_USER_INFO_NOTES") - @GetMapping(value="/get-user-info") - @ResponseStatus(HttpStatus.OK) - public Result getUserInfo(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ - logger.info("login user {},get user info : {}", 
loginUser.getUserName()); - try{ - Map result = usersService.getUserInfo(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(GET_USER_INFO_ERROR.getMsg(),e); - return error(Status.GET_USER_INFO_ERROR.getCode(), Status.GET_USER_INFO_ERROR.getMsg()); - } - } - - /** - * user list no paging - * - * @param loginUser - * @return - */ - @ApiOperation(value = "listUser", notes= "LIST_USER_NOTES") - @GetMapping(value="/list") - @ResponseStatus(HttpStatus.OK) - public Result listUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ - logger.info("login user {}, user list"); - try{ - Map result = usersService.queryAllGeneralUsers(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(USER_LIST_ERROR.getMsg(),e); - return error(Status.USER_LIST_ERROR.getCode(), Status.USER_LIST_ERROR.getMsg()); - } - } - - - /** - * user list no paging - * - * @param loginUser - * @return - */ - @GetMapping(value="/list-all") - @ResponseStatus(HttpStatus.OK) - public Result listAll(@RequestAttribute(value = Constants.SESSION_USER) User loginUser){ - logger.info("login user {}, user list"); - try{ - Map result = usersService.queryUserList(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(USER_LIST_ERROR.getMsg(),e); - return error(Status.USER_LIST_ERROR.getCode(), Status.USER_LIST_ERROR.getMsg()); - } - } - - - /** - * verify username - * - * @param loginUser - * @param userName - * @return - */ - @ApiOperation(value = "verifyUserName", notes= "VERIFY_USER_NAME_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String") - }) - @GetMapping(value = "/verify-user-name") - @ResponseStatus(HttpStatus.OK) - public Result verifyUserName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="userName") String userName - ) { - try{ - - logger.info("login user {}, verfiy user name: {}", - 
loginUser.getUserName(),userName); - return usersService.verifyUserName(userName); - }catch (Exception e){ - logger.error(VERIFY_USERNAME_ERROR.getMsg(),e); - return error(Status.VERIFY_USERNAME_ERROR.getCode(), Status.VERIFY_USERNAME_ERROR.getMsg()); - } - } - - - /** - * unauthorized user - * - * @param loginUser - * @param alertgroupId - * @return - */ - @ApiOperation(value = "unauthorizedUser", notes= "UNAUTHORIZED_USER_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID",type = "String") - }) - @GetMapping(value = "/unauth-user") - @ResponseStatus(HttpStatus.OK) - public Result unauthorizedUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("alertgroupId") Integer alertgroupId) { - try{ - logger.info("unauthorized user, login user:{}, alert group id:{}", - loginUser.getUserName(), alertgroupId); - Map result = usersService.unauthorizedUser(loginUser, alertgroupId); - return returnDataList(result); - }catch (Exception e){ - logger.error(UNAUTHORIZED_USER_ERROR.getMsg(),e); - return error(Status.UNAUTHORIZED_USER_ERROR.getCode(), Status.UNAUTHORIZED_USER_ERROR.getMsg()); - } - } - - - /** - * authorized user - * - * @param loginUser - * @param alertgroupId - * @return - */ - @ApiOperation(value = "authorizedUser", notes= "AUTHORIZED_USER_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID",type = "String") - }) - @GetMapping(value = "/authed-user") - @ResponseStatus(HttpStatus.OK) - public Result authorizedUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("alertgroupId") Integer alertgroupId) { - try{ - logger.info("authorized user , login user:{}, alert group id:{}", - loginUser.getUserName(), alertgroupId); - Map result = usersService.authorizedUser(loginUser, alertgroupId); - return returnDataList(result); - }catch (Exception e){ - 
logger.error(AUTHORIZED_USER_ERROR.getMsg(),e); - return error(Status.AUTHORIZED_USER_ERROR.getCode(), Status.AUTHORIZED_USER_ERROR.getMsg()); - } - } - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/WorkerGroupController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/WorkerGroupController.java deleted file mode 100644 index 564da50be3..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/WorkerGroupController.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.service.WorkerGroupService; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; - -import static cn.escheduler.api.utils.Constants.SESSION_USER; - -/** - * worker group controller - */ -@Api(tags = "WORKER_GROUP_TAG", position = 1) -@RestController -@RequestMapping("/worker-group") -public class WorkerGroupController extends BaseController{ - - private static final Logger logger = LoggerFactory.getLogger(WorkerGroupController.class); - - - @Autowired - WorkerGroupService workerGroupService; - - - /** - * create or update a worker group - * @param loginUser - * @param id - * @param name - * @param ipList - * @return - */ - @ApiOperation(value = "saveWorkerGroup", notes= "CREATE_WORKER_GROUP_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", dataType = "Int", example = "10", defaultValue = "0"), - @ApiImplicitParam(name = "name", value = "WORKER_GROUP_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "ipList", value = "WORKER_IP_LIST", required = true, dataType ="String") - }) - @PostMapping(value = "/save") - @ResponseStatus(HttpStatus.OK) - public Result saveWorkerGroup(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @RequestParam(value = "id", required = false, defaultValue = "0") int id, - @RequestParam(value = "name") 
String name, - @RequestParam(value = "ipList") String ipList - ) { - logger.info("save worker group: login user {}, id:{}, name: {}, ipList: {} ", - loginUser.getUserName(), id, name, ipList); - - try { - Map result = workerGroupService.saveWorkerGroup(id, name, ipList); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.SAVE_ERROR.getMsg(),e); - return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg()); - } - } - - /** - * query worker groups paging - * @param loginUser - * @param pageNo - * @param searchVal - * @param pageSize - * @return - */ - @ApiOperation(value = "queryAllWorkerGroupsPaging", notes= "QUERY_WORKER_GROUP_PAGING_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", dataType = "Int", example = "10", defaultValue = "0"), - @ApiImplicitParam(name = "name", value = "WORKER_GROUP_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "ipList", value = "WORKER_IP_LIST", required = true, dataType ="String") - }) - @GetMapping(value = "/list-paging") - @ResponseStatus(HttpStatus.OK) - public Result queryAllWorkerGroupsPaging(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize - ) { - logger.info("query all worker group paging: login user {}, pageNo:{}, pageSize:{}, searchVal:{}", - loginUser.getUserName() , pageNo, pageSize, searchVal); - - try { - searchVal = ParameterUtils.handleEscapes(searchVal); - Map result = workerGroupService.queryAllGroupPaging(pageNo, pageSize, searchVal); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(Status.SAVE_ERROR.getMsg(),e); - return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg()); - } - } - - /** - * query all worker groups - * @param loginUser - * @return - */ - @ApiOperation(value = "queryAllWorkerGroups", 
notes= "QUERY_WORKER_GROUP_LIST_NOTES") - @GetMapping(value = "/all-groups") - @ResponseStatus(HttpStatus.OK) - public Result queryAllWorkerGroups(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser - ) { - logger.info("query all worker group: login user {}", - loginUser.getUserName() ); - - try { - Map result = workerGroupService.queryAllGroup(); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.SAVE_ERROR.getMsg(),e); - return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg()); - } - } - - /** - * delete worker group by id - * @param loginUser - * @param id - * @return - */ - @ApiOperation(value = "deleteById", notes= "DELETE_WORKER_GROUP_BY_ID_NOTES") - @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", required = true, dataType = "Int", example = "10"), - - }) - @GetMapping(value = "/delete-by-id") - @ResponseStatus(HttpStatus.OK) - public Result deleteById(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @RequestParam("id") Integer id - ) { - logger.info("delete worker group: login user {}, id:{} ", - loginUser.getUserName() , id); - - try { - Map result = workerGroupService.deleteWorkerGroupById(id); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.SAVE_ERROR.getMsg(),e); - return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg()); - } - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/dto/CommandStateCount.java b/escheduler-api/src/main/java/cn/escheduler/api/dto/CommandStateCount.java deleted file mode 100644 index 00abfe1892..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/dto/CommandStateCount.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.dto; - -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.common.enums.ExecutionStatus; - -/** - * command state count - */ -public class CommandStateCount { - - private int errorCount; - private int normalCount; - private CommandType commandState; - - public CommandStateCount(){} - public CommandStateCount(int errorCount, int normalCount, CommandType commandState) { - this.errorCount = errorCount; - this.normalCount = normalCount; - this.commandState = commandState; - } - - public int getErrorCount() { - return errorCount; - } - - public void setErrorCount(int errorCount) { - this.errorCount = errorCount; - } - - public int getNormalCount() { - return normalCount; - } - - public void setNormalCount(int normalCount) { - this.normalCount = normalCount; - } - - public CommandType getCommandState() { - return commandState; - } - - public void setCommandState(CommandType commandState) { - this.commandState = commandState; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/dto/DefineUserDto.java b/escheduler-api/src/main/java/cn/escheduler/api/dto/DefineUserDto.java deleted file mode 100644 index 36b5d907ed..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/dto/DefineUserDto.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license 
agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.dto; - -import cn.escheduler.dao.entity.DefinitionGroupByUser; - -import java.util.List; - -/** - * - */ -public class DefineUserDto { - - private int count; - - private List userList; - - public DefineUserDto(List defineGroupByUsers) { - - for(DefinitionGroupByUser define : defineGroupByUsers){ - count += define.getCount(); - } - this.userList = defineGroupByUsers; - } - - public int getCount() { - return count; - } - - public void setCount(int count) { - this.count = count; - } - - public List getUserList() { - return userList; - } - - public void setUserList(List userList) { - this.userList = userList; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/dto/ScheduleParam.java b/escheduler-api/src/main/java/cn/escheduler/api/dto/ScheduleParam.java deleted file mode 100644 index 5b08fbc08f..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/dto/ScheduleParam.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.dto; - -import java.util.Date; - -/** - * schedule parameters - * 调度参数 - */ -public class ScheduleParam { - private Date startTime; - private Date endTime; - private String crontab; - - public ScheduleParam() { - } - - public ScheduleParam(Date startTime, Date endTime, String crontab) { - this.startTime = startTime; - this.endTime = endTime; - this.crontab = crontab; - } - - public Date getStartTime() { - return startTime; - } - - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - public Date getEndTime() { - return endTime; - } - - public void setEndTime(Date endTime) { - this.endTime = endTime; - } - - public String getCrontab() { - return crontab; - } - - public void setCrontab(String crontab) { - this.crontab = crontab; - } - - - @Override - public String toString() { - return "ScheduleParam{" + - "startTime=" + startTime + - ", endTime=" + endTime + - ", crontab='" + crontab + '\'' + - '}'; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/dto/TaskCountDto.java b/escheduler-api/src/main/java/cn/escheduler/api/dto/TaskCountDto.java deleted file mode 100644 index 786560b7b4..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/dto/TaskCountDto.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.dto; - -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.dao.entity.ExecuteStatusCount; - -import java.util.ArrayList; -import java.util.List; - -/** - * task count dto - */ -public class TaskCountDto { - - /** - * total count - */ - private int totalCount; - - /** - * - */ - private List taskCountDtos; - - - public TaskCountDto(List taskInstanceStateCounts) { - countTaskDtos(taskInstanceStateCounts); - } - - private void countTaskDtos(List taskInstanceStateCounts){ - int submitted_success = 0; - int running_exeution = 0; - int ready_pause = 0; - int pause = 0; - int ready_stop = 0; - int stop = 0; - int failure = 0; - int success = 0; - int need_fault_tolerance = 0; - int kill = 0; - int waitting_thread = 0; - int waitting_depend = 0; - - for(ExecuteStatusCount taskInstanceStateCount : taskInstanceStateCounts){ - ExecutionStatus status = taskInstanceStateCount.getExecutionStatus(); - totalCount += taskInstanceStateCount.getCount(); - switch (status){ - case SUBMITTED_SUCCESS: - submitted_success += taskInstanceStateCount.getCount(); - break; - case RUNNING_EXEUTION: - running_exeution += taskInstanceStateCount.getCount(); - break; - case READY_PAUSE: - ready_pause += taskInstanceStateCount.getCount(); - break; - case PAUSE: - 
pause += taskInstanceStateCount.getCount(); - break; - case READY_STOP: - ready_stop += taskInstanceStateCount.getCount(); - break; - case STOP: - stop += taskInstanceStateCount.getCount(); - break; - case FAILURE: - failure += taskInstanceStateCount.getCount(); - break; - case SUCCESS: - success += taskInstanceStateCount.getCount(); - break; - case NEED_FAULT_TOLERANCE: - failure += taskInstanceStateCount.getCount(); - break; - case KILL: - kill += taskInstanceStateCount.getCount(); - break; - case WAITTING_THREAD: - kill += taskInstanceStateCount.getCount(); - break; - case WAITTING_DEPEND: - kill += taskInstanceStateCount.getCount(); - break; - - default: - break; - } - } - this.taskCountDtos = new ArrayList<>(); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUBMITTED_SUCCESS, submitted_success)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.RUNNING_EXEUTION, running_exeution)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_PAUSE, ready_pause)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.PAUSE, pause)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_STOP, ready_stop)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.STOP, stop)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.FAILURE, failure)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUCCESS, success)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.NEED_FAULT_TOLERANCE, need_fault_tolerance)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.KILL, kill)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_THREAD, waitting_thread)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_DEPEND, waitting_depend)); - } - - - public List getTaskCountDtos(){ - return taskCountDtos; - } - - public void setTaskCountDtos(List taskCountDtos) { - this.taskCountDtos = taskCountDtos; - } - - public int getTotalCount() { - return 
totalCount; - } - - public void setTotalCount(int totalCount) { - this.totalCount = totalCount; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/dto/TaskStateCount.java b/escheduler-api/src/main/java/cn/escheduler/api/dto/TaskStateCount.java deleted file mode 100644 index 8d293207b4..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/dto/TaskStateCount.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.dto; - -import cn.escheduler.common.enums.ExecutionStatus; - -/** - * task state count - */ -public class TaskStateCount { - - private int count; - private ExecutionStatus taskStateType; - - public TaskStateCount(ExecutionStatus taskStateType, int count) { - this.taskStateType = taskStateType; - this.count = count; - } - - - public int getCount() { - return count; - } - - public void setCount(int count) { - this.count = count; - } - - public ExecutionStatus getTaskStateType() { - return taskStateType; - } - - public void setTaskStateType(ExecutionStatus taskStateType) { - this.taskStateType = taskStateType; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/dto/gantt/GanttDto.java b/escheduler-api/src/main/java/cn/escheduler/api/dto/gantt/GanttDto.java deleted file mode 100644 index f9f900a827..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/dto/gantt/GanttDto.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.dto.gantt; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * gantt DTO - * 甘特图 DTO - */ -public class GanttDto { - - /** - * height - * 高度 - */ - private int height; - - /** - * tasks list - * 任务集合 - */ - private List tasks = new ArrayList<>(); - - /** - * task name list - * 任务名称 - */ - private List taskNames; - - /** - * task status map - * 任务状态 - */ - private Map taskStatus; - - - public GanttDto(){ - this.taskStatus = new HashMap<>(); - taskStatus.put("success","success"); - } - public GanttDto(int height, List tasks, List taskNames){ - this(); - this.height = height; - this.tasks = tasks; - this.taskNames = taskNames;; - } - public GanttDto(int height, List tasks, List taskNames, Map taskStatus) { - this.height = height; - this.tasks = tasks; - this.taskNames = taskNames; - this.taskStatus = taskStatus; - } - - public int getHeight() { - return height; - } - - public void setHeight(int height) { - this.height = height; - } - - public List getTasks() { - return tasks; - } - - public void setTasks(List tasks) { - this.tasks = tasks; - } - - public List getTaskNames() { - return taskNames; - } - - public void setTaskNames(List taskNames) { - this.taskNames = taskNames; - } - - public Map getTaskStatus() { - return taskStatus; - } - - public void setTaskStatus(Map taskStatus) { - this.taskStatus = taskStatus; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/dto/gantt/Task.java b/escheduler-api/src/main/java/cn/escheduler/api/dto/gantt/Task.java deleted file mode 100644 index 69f46134f2..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/dto/gantt/Task.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.dto.gantt; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -/** - * Task - * 任务 - */ -public class Task { - /** - * task name - * 任务名称 - */ - private String taskName; - - /** - * task start date - * 任务开始时间 - */ - private List startDate = new ArrayList<>(); - /** - * task end date - * 任务结束时间 - */ - private List endDate = new ArrayList<>(); - - /** - * task execution date - * 任务执行时间 - */ - private Date executionDate; - - /** - * task iso start - * 任务开始时间 - */ - private Date isoStart; - - /** - * task iso end - * 任务结束时间 - */ - private Date isoEnd; - - /** - * task status - * 执行状态 - */ - private String status; - - /** - * task duration - * 运行时长 - */ - private String duration; - - public String getTaskName() { - return taskName; - } - - public void setTaskName(String taskName) { - this.taskName = taskName; - } - - public List getStartDate() { - return startDate; - } - - public void setStartDate(List startDate) { - this.startDate = startDate; - } - - public List getEndDate() { - return endDate; - } - - public void setEndDate(List endDate) { - this.endDate = endDate; - } - - public Date getExecutionDate() { - return executionDate; - } - - public void setExecutionDate(Date executionDate) { - this.executionDate = executionDate; - } - - public Date getIsoStart() { - return isoStart; - } - - public void setIsoStart(Date isoStart) { - 
this.isoStart = isoStart; - } - - public Date getIsoEnd() { - return isoEnd; - } - - public void setIsoEnd(Date isoEnd) { - this.isoEnd = isoEnd; - } - - public String getStatus() { - return status; - } - - public void setStatus(String status) { - this.status = status; - } - - public String getDuration() { - return duration; - } - - public void setDuration(String duration) { - this.duration = duration; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/dto/treeview/Instance.java b/escheduler-api/src/main/java/cn/escheduler/api/dto/treeview/Instance.java deleted file mode 100644 index e569d3ba95..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/dto/treeview/Instance.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.dto.treeview; - -import java.util.Date; - -/** - * Instance - */ -public class Instance { - - private int id; - /** - * node name - * 节点名称 - */ - private String name; - - /** - * node type - * 节点类型 - */ - private String type; - - /** - * node status - * 状态 - */ - private String state; - - /** - * node start time - * 开始时间 - */ - private Date startTime; - - /** - * node end time - * 结束时间 - */ - private Date endTime; - - - - /** - * node running on which host - * 运行机器 - */ - private String host; - - /** - * node duration - * 运行时长 - */ - private String duration; - - private int subflowId; - - - public Instance(){} - - public Instance(int id,String name, String type){ - this.id = id; - this.name = name; - this.type = type; - } - - public Instance(int id,String name, String type,String state,Date startTime, Date endTime, String host, String duration,int subflowId) { - this.id = id; - this.name = name; - this.type = type; - this.state = state; - this.startTime = startTime; - this.endTime = endTime; - this.host = host; - this.duration = duration; - this.subflowId = subflowId; - } - - public Instance(int id,String name, String type,String state,Date startTime, Date endTime, String host, String duration) { - this(id, name, type, state, startTime, endTime,host,duration,0); - } - - - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public String getState() { - return state; - } - - public void setState(String state) { - this.state = state; - } - - public Date getStartTime() { - return startTime; - } - - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - public Date getEndTime() { - return endTime; - } - - public void setEndTime(Date endTime) { - 
this.endTime = endTime; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public String getDuration() { - return duration; - } - - public void setDuration(String duration) { - this.duration = duration; - } - - public int getSubflowId() { - return subflowId; - } - - public void setSubflowId(int subflowId) { - this.subflowId = subflowId; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/dto/treeview/TreeViewDto.java b/escheduler-api/src/main/java/cn/escheduler/api/dto/treeview/TreeViewDto.java deleted file mode 100644 index 35ca7f8aa0..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/dto/treeview/TreeViewDto.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.dto.treeview; - -import java.util.ArrayList; -import java.util.List; - -/** - * TreeView - */ -public class TreeViewDto { - - /** - * name - */ - private String name; - - /** - * type - */ - private String type; - - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - /** - * instances list - * 实例列表 - */ - - private List instances = new ArrayList<>(); - - /** - * children - */ - private List children = new ArrayList<>(); - - - public List getInstances() { - return instances; - } - - public void setInstances(List instances) { - this.instances = instances; - } - - public List getChildren() { - return children; - } - - public void setChildren(List children) { - this.children = children; - } - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/enums/ExecuteType.java b/escheduler-api/src/main/java/cn/escheduler/api/enums/ExecuteType.java deleted file mode 100644 index b09f424b30..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/enums/ExecuteType.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.enums; - -/** - * execute type - */ -public enum ExecuteType { - - - /** - * 操作类型 - * 1.重跑 2.恢复暂停 3.恢复失败 4.停止 5.暂停 - */ - NONE,REPEAT_RUNNING, RECOVER_SUSPENDED_PROCESS, START_FAILURE_TASK_PROCESS, STOP, PAUSE; - - - public static ExecuteType getEnum(int value){ - for (ExecuteType e: ExecuteType.values()) { - if(e.ordinal() == value) { - return e; - } - } - return null;//For values out of enum scope - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java b/escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java deleted file mode 100644 index 802dddfb99..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.enums; - -/** - * status enum - */ -public enum Status { - - SUCCESS(0, "success"), - - REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid"), - TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid"), - USER_NAME_EXIST(10003, "user name already exists"), - USER_NAME_NULL(10004,"user name is null"), -// DB_OPERATION_ERROR(10005, "database operation error"), - HDFS_OPERATION_ERROR(10006, "hdfs operation error"), - UPDATE_FAILED(10007, "updateProcessInstance failed"), - TASK_INSTANCE_NOT_FOUND(10008, "task instance not found"), - TENANT_NAME_EXIST(10009, "tenant code already exists"), - USER_NOT_EXIST(10010, "user {0} not exists"), - ALERT_GROUP_NOT_EXIST(10011, "alarm group not found"), - ALERT_GROUP_EXIST(10012, "alarm group already exists"), - USER_NAME_PASSWD_ERROR(10013,"user name or password error"), - LOGIN_SESSION_FAILED(10014,"create session failed!"), - DATASOURCE_EXIST(10015, "data source name already exists"), - DATASOURCE_CONNECT_FAILED(10016, "data source connection failed"), - TENANT_NOT_EXIST(10017, "tenant not exists"), - PROJECT_NOT_FOUNT(10018, "project {0} not found "), - PROJECT_ALREADY_EXISTS(10019, "project {0} already exists"), - TASK_INSTANCE_NOT_EXISTS(10020, "task instance {0} does not exist"), - TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE(10021, "task instance {0} is not sub process instance"), - SCHEDULE_CRON_NOT_EXISTS(10022, "scheduler crontab {0} does not exist"), - SCHEDULE_CRON_ONLINE_FORBID_UPDATE(10023, "online status does not allow updateProcessInstance operations"), - SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}"), - MASTER_NOT_EXISTS(10025, "master does not exist"), - SCHEDULE_STATUS_UNKNOWN(10026, "unknown command: {0}"), - CREATE_ALERT_GROUP_ERROR(10027,"create alert group error"), - QUERY_ALL_ALERTGROUP_ERROR(10028,"query all alertgroup error"), - LIST_PAGING_ALERT_GROUP_ERROR(10029,"list paging alert group error"), - 
UPDATE_ALERT_GROUP_ERROR(10030,"updateProcessInstance alert group error"), - DELETE_ALERT_GROUP_ERROR(10031,"delete alert group error"), - ALERT_GROUP_GRANT_USER_ERROR(10032,"alert group grant user error"), - CREATE_DATASOURCE_ERROR(10033,"create datasource error"), - UPDATE_DATASOURCE_ERROR(10034,"updateProcessInstance datasource error"), - QUERY_DATASOURCE_ERROR(10035,"query datasource error"), - CONNECT_DATASOURCE_FAILURE(10036,"connect datasource failure"), - CONNECTION_TEST_FAILURE(10037,"connection test failure"), - DELETE_DATA_SOURCE_FAILURE(10038,"delete data source failure"), - VERFIY_DATASOURCE_NAME_FAILURE(10039,"verfiy datasource name failure"), - UNAUTHORIZED_DATASOURCE(10040,"unauthorized datasource"), - AUTHORIZED_DATA_SOURCE(10041,"authorized data source"), - LOGIN_SUCCESS(10042,"login success"), - USER_LOGIN_FAILURE(10043,"user login failure"), - LIST_WORKERS_ERROR(10044,"list workers error"), - LIST_MASTERS_ERROR(10045,"list masters error"), - UPDATE_PROJECT_ERROR(10046,"updateProcessInstance project error"), - QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047,"query project details by id error"), - CREATE_PROJECT_ERROR(10048,"create project error"), - LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049,"login user query project list paging error"), - DELETE_PROJECT_ERROR(10050,"delete project error"), - QUERY_UNAUTHORIZED_PROJECT_ERROR(10051,"query unauthorized project error"), - QUERY_AUTHORIZED_PROJECT(10052,"query authorized project"), - QUERY_QUEUE_LIST_ERROR(10053,"query queue list error"), - CREATE_RESOURCE_ERROR(10054,"create resource error"), - UPDATE_RESOURCE_ERROR(10055,"updateProcessInstance resource error"), - QUERY_RESOURCES_LIST_ERROR(10056,"query resources list error"), - QUERY_RESOURCES_LIST_PAGING(10057,"query resources list paging"), - DELETE_RESOURCE_ERROR(10058,"delete resource error"), - VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059,"verify resource by name and type error"), - VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060,"view resource file 
online error"), - CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061,"create resource file online error"), - RESOURCE_FILE_IS_EMPTY(10062,"resource file is empty"), - EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063,"edit resource file online error"), - DOWNLOAD_RESOURCE_FILE_ERROR(10064,"download resource file error"), - CREATE_UDF_FUNCTION_ERROR(10065 ,"create udf function error"), - VIEW_UDF_FUNCTION_ERROR( 10066,"view udf function error"), - UPDATE_UDF_FUNCTION_ERROR(10067,"updateProcessInstance udf function error"), - QUERY_UDF_FUNCTION_LIST_PAGING_ERROR( 10068,"query udf function list paging error"), - QUERY_DATASOURCE_BY_TYPE_ERROR( 10069,"query datasource by type error"), - VERIFY_UDF_FUNCTION_NAME_ERROR( 10070,"verify udf function name error"), - DELETE_UDF_FUNCTION_ERROR( 10071,"delete udf function error"), - AUTHORIZED_FILE_RESOURCE_ERROR( 10072,"authorized file resource error"), - UNAUTHORIZED_FILE_RESOURCE_ERROR( 10073,"unauthorized file resource error"), - UNAUTHORIZED_UDF_FUNCTION_ERROR( 10074,"unauthorized udf function error"), - AUTHORIZED_UDF_FUNCTION_ERROR(10075,"authorized udf function error"), - CREATE_SCHEDULE_ERROR(10076,"create schedule error"), - UPDATE_SCHEDULE_ERROR(10077,"updateProcessInstance schedule error"), - PUBLISH_SCHEDULE_ONLINE_ERROR(10078,"publish schedule online error"), - OFFLINE_SCHEDULE_ERROR(10079,"offline schedule error"), - QUERY_SCHEDULE_LIST_PAGING_ERROR(10080,"query schedule list paging error"), - QUERY_SCHEDULE_LIST_ERROR(10081,"query schedule list error"), - QUERY_TASK_LIST_PAGING_ERROR(10082,"query task list paging error"), - QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083,"query task record list paging error"), - CREATE_TENANT_ERROR(10084,"create tenant error"), - QUERY_TENANT_LIST_PAGING_ERROR(10085,"query tenant list paging error"), - QUERY_TENANT_LIST_ERROR(10086,"query tenant list error"), - UPDATE_TENANT_ERROR(10087,"updateProcessInstance tenant error"), - DELETE_TENANT_BY_ID_ERROR(10088,"delete tenant by id error"), - 
VERIFY_TENANT_CODE_ERROR(10089,"verify tenant code error"), - CREATE_USER_ERROR(10090,"create user error"), - QUERY_USER_LIST_PAGING_ERROR(10091,"query user list paging error"), - UPDATE_USER_ERROR(10092,"updateProcessInstance user error"), - DELETE_USER_BY_ID_ERROR(10093,"delete user by id error"), - GRANT_PROJECT_ERROR(10094,"grant project error"), - GRANT_RESOURCE_ERROR(10095,"grant resource error"), - GRANT_UDF_FUNCTION_ERROR(10096,"grant udf function error"), - GRANT_DATASOURCE_ERROR(10097,"grant datasource error"), - GET_USER_INFO_ERROR(10098,"get user info error"), - USER_LIST_ERROR(10099,"user list error"), - VERIFY_USERNAME_ERROR(10100,"verify username error"), - UNAUTHORIZED_USER_ERROR(10101,"unauthorized user error"), - AUTHORIZED_USER_ERROR(10102,"authorized user error"), - QUERY_TASK_INSTANCE_LOG_ERROR(10103,"view task instance log error"), - DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104,"download task instance log file error"), - CREATE_PROCESS_DEFINITION(10105,"create process definition"), - VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106,"verify process definition name unique error"), - UPDATE_PROCESS_DEFINITION_ERROR(10107,"updateProcessInstance process definition error"), - RELEASE_PROCESS_DEFINITION_ERROR(10108,"release process definition error"), - QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109,"query datail of process definition error"), - QUERY_PROCCESS_DEFINITION_LIST(10110,"query proccess definition list"), - ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111,"encapsulation treeview structure error"), - GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112,"get tasks list by process definition id error"), - QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113,"query process instance list paging error"), - QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114,"query task list by process instance id error"), - UPDATE_PROCESS_INSTANCE_ERROR(10115,"updateProcessInstance process instance error"), - QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116,"query process instance 
by id error"), - DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117,"delete process instance by id error"), - QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118,"query sub process instance detail info by task id error"), - QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119,"query parent process instance detail info by sub process instance id error"), - QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120,"query process instance all variables error"), - ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121,"encapsulation process instance gantt structure error"), - QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query proccess definition list paging error"), - SIGN_OUT_ERROR(10123,"sign out error"), - TENANT_CODE_HAS_ALREADY_EXISTS(10124,"tenant code has already exists"), - IP_IS_EMPTY(10125,"ip is empty"), - SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}"), - CREATE_QUEUE_ERROR(10127, "create queue error"), - QUEUE_NOT_EXIST(10128, "queue {0} not exists"), - QUEUE_VALUE_EXIST(10129, "queue value {0} already exists"), - QUEUE_NAME_EXIST(10130, "queue name {0} already exists"), - UPDATE_QUEUE_ERROR(10131, "update queue error"), - NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required"), - VERIFY_QUEUE_ERROR(10133,"verify queue error"), - NAME_NULL(10134,"name must be not null"), - NAME_EXIST(10135, "name {0} already exists"), - SAVE_ERROR(10136, "save error"), - DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!"), - BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117,"batch delete process instance by ids {0} error"), - PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error"), - PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error"), - SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end"), - - - UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found"), - UDF_FUNCTION_EXISTS(20002, 
"UDF function already exists"), -// RESOURCE_EMPTY(20003, "resource file is empty"), - RESOURCE_NOT_EXIST(20004, "resource not exist"), - RESOURCE_EXIST(20005, "resource already exists"), - RESOURCE_SUFFIX_NOT_SUPPORT_VIEW(20006, "resource suffix do not support online viewing"), - RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit"), - RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified"), - UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar"), - HDFS_COPY_FAIL(20009, "hdfs copy {0} -> {1} fail"), - RESOURCE_FILE_EXIST(20010, "resource file {0} already exists in hdfs,please delete it or change name!"), - RESOURCE_FILE_NOT_EXIST(20011, "resource file {0} not exists in hdfs!"), - - - - USER_NO_OPERATION_PERM(30001, "user has no operation privilege"), - USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} is not has project {1} permission"), - - - PROCESS_INSTANCE_NOT_EXIST(50001, "process instance {0} does not exist"), - PROCESS_INSTANCE_EXIST(50002, "process instance {0} already exists"), - PROCESS_DEFINE_NOT_EXIST(50003, "process definition {0} does not exist"), - PROCESS_DEFINE_NOT_RELEASE(50004, "process definition {0} not on line"), - PROCESS_INSTANCE_ALREADY_CHANGED(50005, "the status of process instance {0} is already {1}"), - PROCESS_INSTANCE_STATE_OPERATION_ERROR(50006, "the status of process instance {0} is {1},Cannot perform {2} operation"), - SUB_PROCESS_INSTANCE_NOT_EXIST(50007, "the task belong to process instance does not exist"), - PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit"), - PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ..."), - PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance"), - TASK_INSTANCE_STATE_COUNT_ERROR(50011,"task instance state count error"), - COUNT_PROCESS_INSTANCE_STATE_ERROR(50012,"count process instance state error"), 
- COUNT_PROCESS_DEFINITION_USER_ERROR(50013,"count process definition user error"), - START_PROCESS_INSTANCE_ERROR(50014,"start process instance error"), - EXECUTE_PROCESS_INSTANCE_ERROR(50015,"execute process instance error"), - CHECK_PROCESS_DEFINITION_ERROR(50016,"check process definition error"), - QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017,"query recipients and copyers by process definition error"), - DATA_IS_NOT_VALID(50017,"data %s not valid"), - DATA_IS_NULL(50018,"data %s is null"), - PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle"), - PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node %s parameter invalid"), - PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line"), - DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022,"delete process definition by id error"), - SCHEDULE_CRON_STATE_ONLINE(50023,"the status of schedule {0} is already on line"), - DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024,"delete schedule by id error"), - BATCH_DELETE_PROCESS_DEFINE_ERROR(50025,"batch delete process definition error"), - BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026,"batch delete process definition by ids {0} error"), - TENANT_NOT_SUITABLE(50027,"there is not any tenant suitable, please choose a tenant available."), - EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028,"export process definition by id error"), - IMPORT_PROCESS_DEFINE_ERROR(50029,"import process definition error"), - - HDFS_NOT_STARTUP(60001,"hdfs not startup"), - HDFS_TERANT_RESOURCES_FILE_EXISTS(60002,"resource file exists,please delete resource first"), - HDFS_TERANT_UDFS_FILE_EXISTS(60003,"udf file exists,please delete resource first"), - - /** - * for monitor - */ - QUERY_DATABASE_STATE_ERROR(70001,"query database state error"), - QUERY_ZOOKEEPER_STATE_ERROR(70002,"query zookeeper state error"), - - - - CREATE_ACCESS_TOKEN_ERROR(70001,"create access token error"), - GENERATE_TOKEN_ERROR(70002,"generate token error"), - QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70003,"query access 
token list paging error"), - - - COMMAND_STATE_COUNT_ERROR(80001,"task instance state count error"), - - QUEUE_COUNT_ERROR(90001,"queue count error"), - - KERBEROS_STARTUP_STATE(100001,"get kerberos startup state error"), - ; - - private int code; - private String msg; - - private Status(int code, String msg) { - this.code = code; - this.msg = msg; - } - - public int getCode() { - return this.code; - } - - public void setCode(int code) { - this.code = code; - } - - public String getMsg() { - return this.msg; - } - - public void setMsg(String msg) { - this.msg = msg; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/interceptor/DruidStatFilter.java b/escheduler-api/src/main/java/cn/escheduler/api/interceptor/DruidStatFilter.java deleted file mode 100644 index ea7bff2199..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/interceptor/DruidStatFilter.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.interceptor; - -import com.alibaba.druid.support.http.WebStatFilter; - -/* this class annotation for druid stat monitor in development -@WebFilter(filterName="druidWebStatFilter",urlPatterns="/*", - initParams={ - @WebInitParam(name="exclusions",value="*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*") - }) */ -public class DruidStatFilter extends WebStatFilter { - - -} \ No newline at end of file diff --git a/escheduler-api/src/main/java/cn/escheduler/api/interceptor/DruidStatViewServlet.java b/escheduler-api/src/main/java/cn/escheduler/api/interceptor/DruidStatViewServlet.java deleted file mode 100644 index 4abac92dfe..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/interceptor/DruidStatViewServlet.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.interceptor; - -import com.alibaba.druid.support.http.StatViewServlet; - - -/* this class annotation for druid stat monitor in development -@WebServlet(urlPatterns = "/druid/*", - initParams={ -// @WebInitParam(name="allow",value="127.0.0.1"), -// @WebInitParam(name="deny",value="192.168.16.111"), - @WebInitParam(name="loginUsername",value="admin"), - @WebInitParam(name="loginPassword",value="escheduler123"), - @WebInitParam(name="resetEnable",value="true") - }) */ -public class DruidStatViewServlet extends StatViewServlet { - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/interceptor/LoginHandlerInterceptor.java b/escheduler-api/src/main/java/cn/escheduler/api/interceptor/LoginHandlerInterceptor.java deleted file mode 100644 index 406bdeff84..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/interceptor/LoginHandlerInterceptor.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.interceptor; - -import cn.escheduler.api.service.SessionService; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.dao.entity.Session; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.mapper.UserMapper; -import org.apache.commons.httpclient.HttpStatus; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.web.servlet.HandlerInterceptor; -import org.springframework.web.servlet.ModelAndView; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -/** - * login interceptor, must login first - */ -public class LoginHandlerInterceptor implements HandlerInterceptor { - private static final Logger logger = LoggerFactory.getLogger(LoginHandlerInterceptor.class); - - @Autowired - private SessionService sessionService; - - @Autowired - private UserMapper userMapper; - - /** - * Intercept the execution of a handler. Called after HandlerMapping determined - * an appropriate handler object, but before HandlerAdapter invokes the handler. - *

DispatcherServlet processes a handler in an execution chain, consisting - * of any number of interceptors, with the handler itself at the end. - * With this method, each interceptor can decide to abort the execution chain, - * typically sending a HTTP error or writing a custom response. - *

Note: special considerations apply for asynchronous - * request processing. For more details see - * {@link org.springframework.web.servlet.AsyncHandlerInterceptor}. - * @param request current HTTP request - * @param response current HTTP response - * @param handler chosen handler to execute, for type and/or instance evaluation - * @return {@code true} if the execution chain should proceed with the - * next interceptor or the handler itself. Else, DispatcherServlet assumes - * that this interceptor has already dealt with the response itself. - * @throws Exception in case of errors - */ - @Override - public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) { - - // get token - String token = request.getHeader("token"); - User user = null; - if (StringUtils.isEmpty(token)){ - Session session = sessionService.getSession(request); - - if (session == null) { - response.setStatus(HttpStatus.SC_UNAUTHORIZED); - logger.info("session info is null "); - return false; - } - - //get user object from session - user = userMapper.selectById(session.getUserId()); - - // if user is null - if (user == null) { - response.setStatus(HttpStatus.SC_UNAUTHORIZED); - logger.info("user does not exist"); - return false; - } - }else { - user = userMapper.queryUserByToken(token); - if (user == null) { - response.setStatus(HttpStatus.SC_UNAUTHORIZED); - logger.info("user token has expired"); - return false; - } - } - request.setAttribute(Constants.SESSION_USER, user); - return true; - } - - @Override - public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception { - - } - - @Override - public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception { - - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/log/LogClient.java b/escheduler-api/src/main/java/cn/escheduler/api/log/LogClient.java 
deleted file mode 100644 index 1940cc0646..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/log/LogClient.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.log; - -import cn.escheduler.rpc.*; -import io.grpc.ManagedChannel; -import io.grpc.ManagedChannelBuilder; -import io.grpc.StatusRuntimeException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.concurrent.TimeUnit; - -/** - * log client - */ -public class LogClient { - - private static final Logger logger = LoggerFactory.getLogger(LogClient.class); - - private final ManagedChannel channel; - private final LogViewServiceGrpc.LogViewServiceBlockingStub blockingStub; - - /** - * construct client connecting to HelloWorld server at {@code host:port} - */ - public LogClient(String host, int port) { - this(ManagedChannelBuilder.forAddress(host, port) - // Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid - // needing certificates. 
- .usePlaintext(true)); - } - - /** - * construct client for accessing RouteGuide server using the existing channel - * - */ - LogClient(ManagedChannelBuilder channelBuilder) { - /** - * set max read size - */ - channelBuilder.maxInboundMessageSize(Integer.MAX_VALUE); - channel = channelBuilder.build(); - blockingStub = LogViewServiceGrpc.newBlockingStub(channel); - } - - /** - * shutdown - * - * @throws InterruptedException - */ - public void shutdown() throws InterruptedException { - channel.shutdown().awaitTermination(5, TimeUnit.SECONDS); - } - - /** - * roll view log - * - * @param path - * @param skipLineNum - * @param limit - * @return - */ - public String rollViewLog(String path,int skipLineNum,int limit) { - logger.info("roll view log : path {},skipLineNum {} ,limit {}", path, skipLineNum, limit); - LogParameter pathParameter = LogParameter - .newBuilder() - .setPath(path) - .setSkipLineNum(skipLineNum) - .setLimit(limit) - .build(); - RetStrInfo retStrInfo; - try { - retStrInfo = blockingStub.rollViewLog(pathParameter); - return retStrInfo.getMsg(); - } catch (StatusRuntimeException e) { - logger.error("roll view log error", e); - return null; - } - } - - /** - * view log - * - * @param path - * @return - */ - public String viewLog(String path) { - logger.info("view log path {}",path); - PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); - RetStrInfo retStrInfo; - try { - retStrInfo = blockingStub.viewLog(pathParameter); - return retStrInfo.getMsg(); - } catch (StatusRuntimeException e) { - logger.error("view log error", e); - return null; - } - } - - /** - * get log size - * - * @param path - * @return - */ - public byte[] getLogBytes(String path) { - logger.info("log path {}",path); - PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); - RetByteInfo retByteInfo; - try { - retByteInfo = blockingStub.getLogBytes(pathParameter); - return retByteInfo.getData().toByteArray(); - } catch 
(StatusRuntimeException e) { - logger.error("log size error", e); - return null; - } - } - -} \ No newline at end of file diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/AccessTokenService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/AccessTokenService.java deleted file mode 100644 index 537ea66d98..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/AccessTokenService.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.common.utils.*; -import cn.escheduler.dao.entity.AccessToken; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.mapper.*; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.util.*; - -/** - * user service - */ -@Service -public class AccessTokenService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(AccessTokenService.class); - - @Autowired - private AccessTokenMapper accessTokenMapper; - - - /** - * query access token list - * - * @param loginUser - * @param searchVal - * @param pageNo - * @param pageSize - * @return - */ - public Map queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); - - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - Page page = new Page(pageNo, pageSize); - int userId = loginUser.getId(); - if (loginUser.getUserType() == UserType.ADMIN_USER){ - userId = 0; - } - IPage accessTokenList = accessTokenMapper.selectAccessTokenPage(page, searchVal, userId); - pageInfo.setTotalCount((int)accessTokenList.getTotal()); - pageInfo.setLists(accessTokenList.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * check - * - * @param result - * @param bool - * @param userNoOperationPerm - * @param status - * @return - */ - private boolean check(Map result, boolean bool, Status userNoOperationPerm, String status) { - //only admin can operate - if (bool) { - 
result.put(Constants.STATUS, userNoOperationPerm); - result.put(status, userNoOperationPerm.getMsg()); - return true; - } - return false; - } - - - /** - * create token - * - * @param userId - * @param expireTime - * @param token - * @return - */ - public Map createToken(int userId, String expireTime, String token) { - Map result = new HashMap<>(5); - - AccessToken accessToken = new AccessToken(); - accessToken.setUserId(userId); - accessToken.setExpireTime(DateUtils.stringToDate(expireTime)); - accessToken.setToken(token); - accessToken.setCreateTime(new Date()); - accessToken.setUpdateTime(new Date()); - - // insert - int insert = accessTokenMapper.insert(accessToken); - - if (insert > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.CREATE_ALERT_GROUP_ERROR); - } - - return result; - } - - /** - * generate token - * @param userId - * @param expireTime - * @return - */ - public Map generateToken(int userId, String expireTime) { - Map result = new HashMap<>(5); - String token = EncryptionUtils.getMd5(userId + expireTime + String.valueOf(System.currentTimeMillis())); - result.put(Constants.DATA_LIST, token); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * delete access token - * @param loginUser - * @param id - * @return - */ - public Map delAccessTokenById(User loginUser, int id) { - Map result = new HashMap<>(5); - //only admin can operate - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NOT_EXIST, id); - return result; - } - - accessTokenMapper.deleteById(id); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * update token by id - * @param id - * @param userId - * @param expireTime - * @param token - * @return - */ - public Map updateToken(int id,int userId, String expireTime, String token) { - Map result = new HashMap<>(5); - AccessToken accessToken = new AccessToken(); - accessToken.setId(id); - accessToken.setUserId(userId); - accessToken.setExpireTime(DateUtils.stringToDate(expireTime)); - 
accessToken.setToken(token); - accessToken.setUpdateTime(new Date()); - - accessTokenMapper.updateById(accessToken); - - putMsg(result, Status.SUCCESS); - return result; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/AlertGroupService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/AlertGroupService.java deleted file mode 100644 index 2d3f3d85ce..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/AlertGroupService.java +++ /dev/null @@ -1,294 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.AlertType; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.AlertGroup; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.entity.UserAlertGroup; -import cn.escheduler.dao.mapper.AlertGroupMapper; -import cn.escheduler.dao.mapper.UserAlertGroupMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * alert group service - */ -@Service -public class AlertGroupService { - - private static final Logger logger = LoggerFactory.getLogger(AlertGroupService.class); - - @Autowired - private AlertGroupMapper alertGroupMapper; - - @Autowired - private UserAlertGroupMapper userAlertGroupMapper; - - /** - * query alert group list - * - * @return - */ - public HashMap queryAlertgroup() { - - HashMap result = new HashMap<>(5); - List alertGroups = alertGroupMapper.queryAllGroupList(); - result.put(Constants.DATA_LIST, alertGroups); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * paging query alarm group list - * - * @param loginUser - * @param searchVal - * @param pageNo - * @param pageSize - * @return - */ - public Map listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - - Map result = new HashMap<>(5); - - Page page = new Page(pageNo, pageSize); - IPage alertGroupIPage = alertGroupMapper.queryAlertGroupPage( - page, searchVal); - PageInfo pageInfo = new 
PageInfo<>(pageNo, pageSize); - pageInfo.setTotalCount((int)alertGroupIPage.getTotal()); - pageInfo.setLists(alertGroupIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * create alert group - * - * @param loginUser - * @param groupName - * @param groupType - * @param desc - * @return - */ - public Map createAlertgroup(User loginUser, String groupName, AlertType groupType, String desc) { - Map result = new HashMap<>(5); - //only admin can operate - if (checkAdmin(loginUser, result)){ - return result; - } - - AlertGroup alertGroup = new AlertGroup(); - Date now = new Date(); - - alertGroup.setGroupName(groupName); - alertGroup.setGroupType(groupType); - alertGroup.setDescription(desc); - alertGroup.setCreateTime(now); - alertGroup.setUpdateTime(now); - - // insert - int insert = alertGroupMapper.insert(alertGroup); - - if (insert > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.CREATE_ALERT_GROUP_ERROR); - } - return result; - } - - /** - * check user is admin or not - * - * @param user - * @return - */ - public boolean isAdmin(User user) { - return user.getUserType() == UserType.ADMIN_USER; - } - - /** - * updateProcessInstance alert group - * - * @param loginUser - * @param id - * @param groupName - * @param groupType - * @param desc - * @return - */ - public Map updateAlertgroup(User loginUser, int id, String groupName, AlertType groupType, String desc) { - Map result = new HashMap<>(5); - - if (checkAdmin(loginUser, result)){ - return result; - } - - - AlertGroup alertGroup = alertGroupMapper.selectById(id); - - if (alertGroup == null) { - putMsg(result, Status.ALERT_GROUP_NOT_EXIST); - return result; - - } - - Date now = new Date(); - - if (StringUtils.isNotEmpty(groupName)) { - alertGroup.setGroupName(groupName); - } - - if (groupType != null) { - alertGroup.setGroupType(groupType); - } - alertGroup.setDescription(desc); - alertGroup.setUpdateTime(now); 
- // updateProcessInstance - alertGroupMapper.updateById(alertGroup); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * delete alert group by id - * - * @param loginUser - * @param id - * @return - */ - public Map delAlertgroupById(User loginUser, int id) { - Map result = new HashMap<>(5); - result.put(Constants.STATUS, false); - - //only admin can operate - if (checkAdmin(loginUser, result)){ - return result; - } - - - alertGroupMapper.deleteById(id); - putMsg(result, Status.SUCCESS); - return result; - } - - - /** - * grant user - * - * @param loginUser - * @param alertgroupId - * @param userIds - * @return - */ - public Map grantUser(User loginUser, int alertgroupId, String userIds) { - Map result = new HashMap<>(5); - result.put(Constants.STATUS, false); - - //only admin can operate - if (checkAdmin(loginUser, result)){ - return result; - } - - userAlertGroupMapper.deleteByAlertgroupId(alertgroupId); - if (StringUtils.isEmpty(userIds)) { - putMsg(result, Status.SUCCESS); - return result; - } - - String[] userIdsArr = userIds.split(","); - - for (String userId : userIdsArr) { - Date now = new Date(); - UserAlertGroup userAlertGroup = new UserAlertGroup(); - userAlertGroup.setAlertgroupId(alertgroupId); - userAlertGroup.setUserId(Integer.parseInt(userId)); - userAlertGroup.setCreateTime(now); - userAlertGroup.setUpdateTime(now); - userAlertGroupMapper.insert(userAlertGroup); - } - - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * verify group name exists - * - * @param loginUser - * @param groupName - * @return - */ - public Result verifyGroupName(User loginUser, String groupName) { - Result result = new Result(); - List alertGroup = alertGroupMapper.queryByGroupName(groupName); - if (alertGroup != null && alertGroup.size() > 0) { - logger.error("group {} has exist, can't create again.", groupName); - result.setCode(Status.ALERT_GROUP_EXIST.getCode()); - result.setMsg(Status.ALERT_GROUP_EXIST.getMsg()); - } else { - 
result.setCode(Status.SUCCESS.getCode()); - result.setMsg(Status.SUCCESS.getMsg()); - } - - return result; - } - - /** - * is admin? - * @param loginUser - * @param result - * @return - */ - private boolean checkAdmin(User loginUser, Map result) { - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return true; - } - return false; - } - - /** - * put message - * - * @param result - * @param status - */ - private void putMsg(Map result, Status status) { - result.put(Constants.STATUS, status); - result.put(Constants.MSG, status.getMsg()); - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/BaseDAGService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/BaseDAGService.java deleted file mode 100644 index af757b9535..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/BaseDAGService.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.common.graph.DAG; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.model.TaskNodeRelation; -import cn.escheduler.common.process.ProcessDag; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.dao.entity.ProcessData; -import cn.escheduler.dao.entity.ProcessInstance; - -import java.util.ArrayList; -import java.util.List; - -/** - * base DAG service - */ -public class BaseDAGService extends BaseService{ - - - /** - * process instance to DAG - * - * @param processInstance - * @return - * @throws Exception - */ - public static DAG processInstance2DAG(ProcessInstance processInstance) throws Exception { - - String processDefinitionJson = processInstance.getProcessInstanceJson(); - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - - List taskNodeList = processData.getTasks(); - - List taskNodeRelations = new ArrayList<>(); - - //Traversing node information and building relationships - for (TaskNode taskNode : taskNodeList) { - String preTasks = taskNode.getPreTasks(); - List preTasksList = JSONUtils.toList(preTasks, String.class); - - //if previous tasks not empty - if (preTasksList != null) { - for (String depNode : preTasksList) { - taskNodeRelations.add(new TaskNodeRelation(depNode, taskNode.getName())); - } - } - } - - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(taskNodeList); - - - // generate detail Dag, to be executed - DAG dag = new DAG<>(); - - if (CollectionUtils.isNotEmpty(processDag.getNodes())) { - for (TaskNode node : processDag.getNodes()) { - dag.addNode(node.getName(), node); - } - } - - if (CollectionUtils.isNotEmpty(processDag.getEdges())) { - for (TaskNodeRelation edge : processDag.getEdges()) { - dag.addEdge(edge.getStartNode(), edge.getEndNode()); - } - } - - return dag; - } -} diff --git 
a/escheduler-api/src/main/java/cn/escheduler/api/service/BaseService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/BaseService.java deleted file mode 100644 index dd63d4d7c4..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/BaseService.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.common.utils.HadoopUtils; -import cn.escheduler.dao.entity.User; -import org.apache.commons.lang3.StringUtils; - -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; -import java.text.MessageFormat; -import java.util.Map; - -/** - * base service - */ -public class BaseService { - - /** - * check admin - * - * @param user - * @return - */ - protected boolean isAdmin(User user) { - return user.getUserType() == UserType.ADMIN_USER; - } - - /** - * check admin - * - * @param loginUser - * @param result - * @return - */ - protected boolean checkAdmin(User loginUser, Map result) { - //only admin can operate - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return true; - } - return false; - } - - /** - * put message to map - * - * @param result - * @param status - * @param statusParams - */ - protected void putMsg(Map result, Status status, Object... statusParams) { - result.put(Constants.STATUS, status); - if (statusParams != null && statusParams.length > 0) { - result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); - } else { - result.put(Constants.MSG, status.getMsg()); - } - } - - /** - * put message to result object - * - * @param result - * @param status - */ - protected void putMsg(Result result, Status status, Object... 
statusParams) { - result.setCode(status.getCode()); - - if (statusParams != null && statusParams.length > 0) { - result.setMsg(MessageFormat.format(status.getMsg(), statusParams)); - } else { - result.setMsg(status.getMsg()); - } - - } - - /** - * get cookie info by name - * @param request - * @param name - * @return get cookie info - */ - public static Cookie getCookie(HttpServletRequest request, String name) { - Cookie[] cookies = request.getCookies(); - if (cookies != null && cookies.length > 0) { - for (Cookie cookie : cookies) { - if (StringUtils.equalsIgnoreCase(name, cookie.getName())) { - return cookie; - } - } - } - - return null; - } - - /** - * create tenant dir if not exists - * @param tenantCode - * @throws Exception - */ - protected void createTenantDirIfNotExists(String tenantCode)throws Exception{ - - String resourcePath = HadoopUtils.getHdfsResDir(tenantCode); - String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); - /** - * init resource path and udf path - */ - HadoopUtils.getInstance().mkdir(resourcePath); - HadoopUtils.getInstance().mkdir(udfsPath); - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/DataAnalysisService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/DataAnalysisService.java deleted file mode 100644 index 284e19a819..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/DataAnalysisService.java +++ /dev/null @@ -1,416 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - - -import cn.escheduler.api.dto.CommandStateCount; -import cn.escheduler.api.dto.DefineUserDto; -import cn.escheduler.api.dto.TaskCountDto; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.common.queue.ITaskQueue; -import cn.escheduler.common.queue.TaskQueueFactory; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.*; -import cn.escheduler.dao.mapper.*; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.text.MessageFormat; -import java.util.*; - -/** - * data analysis service - */ -@Service -public class DataAnalysisService { - - private static final Logger logger = LoggerFactory.getLogger(DataAnalysisService.class); - - @Autowired - ProjectMapper projectMapper; - - @Autowired - ProjectService projectService; - - @Autowired - ProcessInstanceMapper processInstanceMapper; - - @Autowired - ProcessDefinitionMapper processDefinitionMapper; - - @Autowired - CommandMapper commandMapper; - - @Autowired - ErrorCommandMapper errorCommandMapper; - - @Autowired - TaskInstanceMapper taskInstanceMapper; - - @Autowired - ProcessDao processDao; - - /** - * statistical task instance status data - * - * @param loginUser - * @param 
projectId - * @param startDate - * @param endDate - * @return - */ - public Map countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) { - - Map result = new HashMap<>(5); - if(projectId != 0){ - Project project = projectMapper.selectById(projectId); - result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId)); - - if (getResultStatus(result)){ - return result; - } - } - - /** - * find all the task lists in the project under the user - * statistics based on task status execution, failure, completion, wait, total - */ - Date start = null; - Date end = null; - - try { - start = DateUtils.getScheduleDate(startDate); - end = DateUtils.getScheduleDate(endDate); - } catch (Exception e) { - logger.error(e.getMessage(),e); - putErrorRequestParamsMsg(result); - return result; - } - - List taskInstanceStateCounts = - taskInstanceMapper.countTaskInstanceStateByUser(loginUser.getId(), - loginUser.getUserType(), start, end, String.valueOf(projectId)); - - TaskCountDto taskCountResult = new TaskCountDto(taskInstanceStateCounts); - if (taskInstanceStateCounts != null) { - result.put(Constants.DATA_LIST, taskCountResult); - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.TASK_INSTANCE_STATE_COUNT_ERROR); - } - return result; - } - - private void putErrorRequestParamsMsg(Map result) { - result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate")); - } - - /** - * statistical process instance status data - * - * @param loginUser - * @param projectId - * @param startDate - * @param endDate - * @return - */ - public Map countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) { - - Map result = new HashMap<>(5); - if(projectId != 0){ - Project project = projectMapper.selectById(projectId); - result = 
projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId)); - - if (getResultStatus(result)){ - return result; - } - } - - Date start = null; - Date end = null; - try { - start = DateUtils.getScheduleDate(startDate); - end = DateUtils.getScheduleDate(endDate); - } catch (Exception e) { - logger.error(e.getMessage(),e); - putErrorRequestParamsMsg(result); - return result; - } - - List projectIds = new ArrayList<>(); - if(projectId !=0){ - projectIds.add(projectId); - }else if(loginUser.getUserType() == UserType.GENERAL_USER){ - projectIds = processDao.getProjectIdListHavePerm(loginUser.getId()); - - } - Integer[] projectIdArray = projectIds.toArray(new Integer[projectIds.size()]); - - List processInstanceStateCounts = - processInstanceMapper.countInstanceStateByUser(start, end, - projectIdArray); - - TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts); - if (processInstanceStateCounts != null) { - result.put(Constants.DATA_LIST, taskCountResult); - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.COUNT_PROCESS_INSTANCE_STATE_ERROR); - } - return result; - } - - - /** - * statistics the process definition quantities of certain person - * - * @param loginUser - * @param projectId - * @return - */ - public Map countDefinitionByUser(User loginUser, int projectId) { - Map result = new HashMap<>(); - - - Integer[] projectIdArray = new Integer[1]; - projectIdArray[0] = projectId; - List defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser( - loginUser.getId(), projectIdArray); - - DefineUserDto dto = new DefineUserDto(defineGroupByUsers); - result.put(Constants.DATA_LIST, dto); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * - * @param result - * @param status - */ - private void putMsg(Map result, Status status) { - result.put(Constants.STATUS, status); - result.put(Constants.MSG, status.getMsg()); - } - - /** - * get result status - * @param result - * @return - */ - 
private boolean getResultStatus(Map result) { - Status resultEnum = (Status) result.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return true; - } - return false; - } - - /** - * statistical command status data - * - * @param loginUser - * @param projectId - * @param startDate - * @param endDate - * @return - */ - public Map countCommandState(User loginUser, int projectId, String startDate, String endDate) { - - Map result = new HashMap<>(5); - if(projectId != 0){ - Project project = projectMapper.selectById(projectId); - result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId)); - - if (getResultStatus(result)){ - return result; - } - } - - /** - * find all the task lists in the project under the user - * statistics based on task status execution, failure, completion, wait, total - */ - Date start = null; - Date end = null; - - try { - start = DateUtils.getScheduleDate(startDate); - end = DateUtils.getScheduleDate(endDate); - } catch (Exception e) { - logger.error(e.getMessage(),e); - putErrorRequestParamsMsg(result); - return result; - } - - List projectIds = new ArrayList<>(); - if(projectId !=0){ - projectIds.add(projectId); - }else if(loginUser.getUserType() == UserType.GENERAL_USER){ - projectIds = processDao.getProjectIdListHavePerm(loginUser.getId()); - - } - Integer[] projectIdArray = projectIds.toArray(new Integer[projectIds.size()]); - // count command state - List commandStateCounts = - commandMapper.countCommandState( - loginUser.getId(), - start, - end, - projectIdArray); - - // count error command state - List errorCommandStateCounts = - errorCommandMapper.countCommandState( - start, end, projectIdArray); - - // - Map> dataMap = new HashMap<>(); - - Map commonCommand = new HashMap<>(); - commonCommand.put("commandState",0); - commonCommand.put("errorCommandState",0); - - - // init data map -// dataMap.put(ExecutionStatus.SUBMITTED_SUCCESS,commonCommand); -// 
dataMap.put(ExecutionStatus.RUNNING_EXEUTION,commonCommand); -// dataMap.put(ExecutionStatus.READY_PAUSE,commonCommand); -// dataMap.put(ExecutionStatus.PAUSE,commonCommand); -// dataMap.put(ExecutionStatus.READY_STOP,commonCommand); -// dataMap.put(ExecutionStatus.STOP,commonCommand); -// dataMap.put(ExecutionStatus.FAILURE,commonCommand); -// dataMap.put(ExecutionStatus.SUCCESS,commonCommand); -// dataMap.put(ExecutionStatus.NEED_FAULT_TOLERANCE,commonCommand); -// dataMap.put(ExecutionStatus.KILL,commonCommand); -// dataMap.put(ExecutionStatus.WAITTING_THREAD,commonCommand); -// dataMap.put(ExecutionStatus.WAITTING_DEPEND,commonCommand); - - // put command state - for (CommandCount executeStatusCount : commandStateCounts){ - Map commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType())); - commandStateCountsMap.put("commandState", executeStatusCount.getCount()); - dataMap.put(executeStatusCount.getCommandType(),commandStateCountsMap); - } - - // put error command state - for (CommandCount errorExecutionStatus : errorCommandStateCounts){ - Map errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType())); - errorCommandStateCountsMap.put("errorCommandState",errorExecutionStatus.getCount()); - dataMap.put(errorExecutionStatus.getCommandType(),errorCommandStateCountsMap); - } - - List list = new ArrayList<>(); - Iterator>> iterator = dataMap.entrySet().iterator(); - while (iterator.hasNext()){ - Map.Entry> next = iterator.next(); - CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get("errorCommandState"), - next.getValue().get("commandState"),next.getKey()); - list.add(commandStateCount); - } - - result.put(Constants.DATA_LIST, list); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * count queue state - * @param loginUser - * @param projectId - * @return - */ - public Map countQueueState(User loginUser, int projectId) { - Map result = new HashMap<>(5); - 
if(projectId != 0){ - Project project = projectMapper.selectById(projectId); - result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId)); - - if (getResultStatus(result)){ - return result; - } - } - - ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance(); - List tasksQueueList = tasksQueue.getAllTasks(cn.escheduler.common.Constants.SCHEDULER_TASKS_QUEUE); - List tasksKillList = tasksQueue.getAllTasks(cn.escheduler.common.Constants.SCHEDULER_TASKS_KILL); - - Map dataMap = new HashMap<>(); - if (loginUser.getUserType() == UserType.ADMIN_USER){ - dataMap.put("taskQueue",tasksQueueList.size()); - dataMap.put("taskKill",tasksKillList.size()); - - result.put(Constants.DATA_LIST, dataMap); - putMsg(result, Status.SUCCESS); - return result; - } - - int[] tasksQueueIds = new int[tasksQueueList.size()]; - int[] tasksKillIds = new int[tasksKillList.size()]; - - int i =0; - for (String taskQueueStr : tasksQueueList){ - if (StringUtils.isNotEmpty(taskQueueStr)){ - String[] splits = taskQueueStr.split("_"); - if (splits.length == 4){ - tasksQueueIds[i++]=Integer.parseInt(splits[3]); - } - } - } - - i = 0; - for (String taskKillStr : tasksKillList){ - if (StringUtils.isNotEmpty(taskKillStr)){ - String[] splits = taskKillStr.split("-"); - if (splits.length == 2){ - tasksKillIds[i++]=Integer.parseInt(splits[1]); - } - } - } - Integer taskQueueCount = 0; - Integer taskKillCount = 0; - int[] projectIds = new int[1]; - projectIds[0] = projectId; - - if (tasksQueueIds.length != 0){ - taskQueueCount = taskInstanceMapper.countTask( - loginUser.getId(),loginUser.getUserType(),projectIds, - tasksQueueIds); - } - - if (tasksKillIds.length != 0){ - taskKillCount = taskInstanceMapper.countTask(loginUser.getId(),loginUser.getUserType(), projectIds, tasksKillIds); - } - - - - dataMap.put("taskQueue",taskQueueCount); - dataMap.put("taskKill",taskKillCount); - - result.put(Constants.DATA_LIST, dataMap); - putMsg(result, Status.SUCCESS); - return result; 
- } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java deleted file mode 100644 index 2b6fc5159d..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java +++ /dev/null @@ -1,690 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.DbType; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.common.job.db.*; -import cn.escheduler.common.utils.CommonUtils; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.dao.entity.DataSource; -import cn.escheduler.dao.entity.Resource; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.mapper.DataSourceMapper; -import cn.escheduler.dao.mapper.DataSourceUserMapper; -import com.alibaba.fastjson.JSONObject; -import com.alibaba.fastjson.TypeReference; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.*; - -import static cn.escheduler.common.utils.PropertyUtils.getString; - -/** - * datasource service - */ -@Service -public class DataSourceService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(DataSourceService.class); - - public static final String NAME = "name"; - public static final String NOTE = "note"; - public static final String TYPE = "type"; - public static final String HOST = "host"; - public static final String PORT = "port"; - public static final String PRINCIPAL = "principal"; - public static final String DATABASE = "database"; - public static final String USER_NAME = "userName"; - public static final String 
PASSWORD = cn.escheduler.common.Constants.PASSWORD; - public static final String OTHER = "other"; - - - @Autowired - private DataSourceMapper dataSourceMapper; - - - @Autowired - private DataSourceUserMapper datasourceUserMapper; - - /** - * create data source - * - * @param loginUser - * @param name - * @param desc - * @param type - * @param parameter - * @return - */ - public Map createDataSource(User loginUser, String name, String desc, DbType type, String parameter) { - - Map result = new HashMap<>(5); - // check name can use or not - if (checkName(name, result)) { - return result; - } - Boolean isConnection = checkConnection(type, parameter); - if (!isConnection) { - logger.info("connect failed, type:{}, parameter:{}", type, parameter); - putMsg(result, Status.DATASOURCE_CONNECT_FAILED); - return result; - } - - BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter); - if (datasource == null) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, parameter); - return result; - } - - // build datasource - DataSource dataSource = new DataSource(); - Date now = new Date(); - - dataSource.setName(name.trim()); - dataSource.setNote(desc); - dataSource.setUserId(loginUser.getId()); - dataSource.setUserName(loginUser.getUserName()); - dataSource.setType(type); - dataSource.setConnectionParams(parameter); - dataSource.setCreateTime(now); - dataSource.setUpdateTime(now); - dataSourceMapper.insert(dataSource); - - putMsg(result, Status.SUCCESS); - - return result; - } - - - /** - * updateProcessInstance datasource - * - * @param loginUser - * @param name - * @param desc - * @param type - * @param parameter - * @return - */ - public Map updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) { - - Map result = new HashMap<>(); - // determine whether the data source exists - DataSource dataSource = dataSourceMapper.selectById(id); - if (dataSource == null) { - putMsg(result, Status.RESOURCE_NOT_EXIST); - 
return result; - } - - //check name can use or not - if(!name.trim().equals(dataSource.getName()) && checkName(name, result)){ - return result; - } - - Boolean isConnection = checkConnection(type, parameter); - if (!isConnection) { - logger.info("connect failed, type:{}, parameter:{}", type, parameter); - putMsg(result, Status.DATASOURCE_CONNECT_FAILED); - return result; - } - Date now = new Date(); - - dataSource.setName(name.trim()); - dataSource.setNote(desc); - dataSource.setUserName(loginUser.getUserName()); - dataSource.setType(type); - dataSource.setConnectionParams(parameter); - dataSource.setUpdateTime(now); - dataSourceMapper.updateById(dataSource); - putMsg(result, Status.SUCCESS); - return result; - } - - private boolean checkName(String name, Map result) { - List queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim()); - if (queryDataSource != null && queryDataSource.size() > 0) { - putMsg(result, Status.DATASOURCE_EXIST); - return true; - } - return false; - } - - - /** - * updateProcessInstance datasource - */ - public Map queryDataSource(int id) { - - Map result = new HashMap(5); - DataSource dataSource = dataSourceMapper.selectById(id); - if (dataSource == null) { - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - // type - String dataSourceType = dataSource.getType().toString(); - // name - String dataSourceName = dataSource.getName(); - // desc - String desc = dataSource.getNote(); - // parameter - String parameter = dataSource.getConnectionParams(); - - BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter); - String database = datasourceForm.getDatabase(); - // jdbc connection params - String other = datasourceForm.getOther(); - String address = datasourceForm.getAddress(); - - String[] hostsPorts = getHostsAndPort(address); - // ip host - String host = hostsPorts[0]; - // prot - String port = hostsPorts[1]; - String separator = ""; - - switch (dataSource.getType()) { - 
case HIVE: - case SQLSERVER: - separator = ";"; - break; - case MYSQL: - case POSTGRESQL: - case CLICKHOUSE: - case ORACLE: - separator = "&"; - break; - default: - separator = "&"; - break; - } - - Map otherMap = new LinkedHashMap(); - if (other != null) { - String[] configs = other.split(separator); - for (String config : configs) { - otherMap.put(config.split("=")[0], config.split("=")[1]); - } - - } - - Map map = new HashMap<>(10); - map.put(NAME, dataSourceName); - map.put(NOTE, desc); - map.put(TYPE, dataSourceType); - map.put(HOST, host); - map.put(PORT, port); - map.put(PRINCIPAL, datasourceForm.getPrincipal()); - map.put(DATABASE, database); - map.put(USER_NAME, datasourceForm.getUser()); - map.put(PASSWORD, datasourceForm.getPassword()); - map.put(OTHER, otherMap); - result.put(Constants.DATA_LIST, map); - putMsg(result, Status.SUCCESS); - return result; - } - - - /** - * query datasource list by keyword - * - * @param loginUser - * @param searchVal - * @param pageNo - * @param pageSize - * @return - */ - public Map queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(); - IPage dataSourceList = null; - Page dataSourcePage = new Page(pageNo, pageSize); - - if (isAdmin(loginUser)) { - dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal); - }else{ - dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal); - } - - List dataSources = dataSourceList.getRecords(); - handlePasswd(dataSources); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - pageInfo.setTotalCount((int)(dataSourceList.getTotal())); - pageInfo.setLists(dataSources); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * get list paging - * - * @param loginUser - * @param searchVal - * @param pageSize - * @param pageInfo - * @return - */ - private List getDataSources(User loginUser, String searchVal, 
Integer pageSize, PageInfo pageInfo) { - IPage dataSourceList = null; - Page dataSourcePage = new Page(pageInfo.getStart(), pageSize); - - if (isAdmin(loginUser)) { - dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal); - }else{ - dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal); - } - List dataSources = dataSourceList.getRecords(); - - handlePasswd(dataSources); - return dataSources; - } - - - /** - * handle datasource connection password for safety - * @param dataSourceList - */ - private void handlePasswd(List dataSourceList) { - - for (DataSource dataSource : dataSourceList) { - - String connectionParams = dataSource.getConnectionParams(); - JSONObject object = JSONObject.parseObject(connectionParams); - object.put(cn.escheduler.common.Constants.PASSWORD, cn.escheduler.common.Constants.XXXXXX); - dataSource.setConnectionParams(JSONUtils.toJson(object)); - - } - } - - /** - * query data resource list - * - * @param loginUser - * @param type - * @return - */ - public Map queryDataSourceList(User loginUser, Integer type) { - Map result = new HashMap<>(5); - - List datasourceList; - - if (isAdmin(loginUser)) { - datasourceList = dataSourceMapper.listAllDataSourceByType(type); - }else{ - datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type); - } - - result.put(Constants.DATA_LIST, datasourceList); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * verify datasource exists - * - * @param loginUser - * @param name - * @return - */ - public Result verifyDataSourceName(User loginUser, String name) { - Result result = new Result(); - List dataSourceList = dataSourceMapper.queryDataSourceByName(name); - if (dataSourceList != null && dataSourceList.size() > 0) { - logger.error("datasource name:{} has exist, can't create again.", name); - putMsg(result, Status.DATASOURCE_EXIST); - } else { - putMsg(result, Status.SUCCESS); - } - - return result; - } - - /** - * 
get connection - * - * @param dbType - * @param parameter - * @return - */ - private Connection getConnection(DbType dbType, String parameter) { - Connection connection = null; - BaseDataSource datasource = null; - try { - switch (dbType) { - case POSTGRESQL: - datasource = JSONObject.parseObject(parameter, PostgreDataSource.class); - Class.forName(Constants.ORG_POSTGRESQL_DRIVER); - break; - case MYSQL: - datasource = JSONObject.parseObject(parameter, MySQLDataSource.class); - Class.forName(Constants.COM_MYSQL_JDBC_DRIVER); - break; - case HIVE: - case SPARK: - if (CommonUtils.getKerberosStartupState()) { - System.setProperty(cn.escheduler.common.Constants.JAVA_SECURITY_KRB5_CONF, - getString(cn.escheduler.common.Constants.JAVA_SECURITY_KRB5_CONF_PATH)); - Configuration configuration = new Configuration(); - configuration.set(cn.escheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - UserGroupInformation.setConfiguration(configuration); - UserGroupInformation.loginUserFromKeytab(getString(cn.escheduler.common.Constants.LOGIN_USER_KEY_TAB_USERNAME), - getString(cn.escheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH)); - } - if (dbType == DbType.HIVE){ - datasource = JSONObject.parseObject(parameter, HiveDataSource.class); - }else if (dbType == DbType.SPARK){ - datasource = JSONObject.parseObject(parameter, SparkDataSource.class); - } - Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER); - break; - case CLICKHOUSE: - datasource = JSONObject.parseObject(parameter, ClickHouseDataSource.class); - Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER); - break; - case ORACLE: - datasource = JSONObject.parseObject(parameter, OracleDataSource.class); - Class.forName(Constants.COM_ORACLE_JDBC_DRIVER); - break; - case SQLSERVER: - datasource = JSONObject.parseObject(parameter, SQLServerDataSource.class); - Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER); - break; - default: - break; - } - if(datasource != null){ - connection = 
DriverManager.getConnection(datasource.getJdbcUrl(), datasource.getUser(), datasource.getPassword()); - } - } catch (Exception e) { - logger.error(e.getMessage(),e); - } - return connection; - } - - - /** - * check connection - * - * @param type - * @param parameter - * @return - */ - public boolean checkConnection(DbType type, String parameter) { - Boolean isConnection = false; - Connection con = getConnection(type, parameter); - if (con != null) { - isConnection = true; - try { - con.close(); - } catch (SQLException e) { - logger.error("close connection fail at DataSourceService::checkConnection()", e); - } - } - return isConnection; - } - - - /** - * test connection - * - * @param loginUser - * @param id - * @return - */ - public boolean connectionTest(User loginUser, int id) { - DataSource dataSource = dataSourceMapper.selectById(id); - return checkConnection(dataSource.getType(), dataSource.getConnectionParams()); - } - - /** - * build paramters - * - * @param name - * @param desc - * @param type - * @param host - * @param port - * @param database - * @param userName - * @param password - * @param other - * @return - */ - public String buildParameter(String name, String desc, DbType type, String host, - String port, String database,String principal,String userName, - String password, String other) { - - String address = buildAddress(type, host, port); - - String jdbcUrl = address + "/" + database; - if (CommonUtils.getKerberosStartupState() && - (type == DbType.HIVE || type == DbType.SPARK)){ - jdbcUrl += ";principal=" + principal; - } - - String separator = ""; - if (Constants.MYSQL.equals(type.name()) - || Constants.POSTGRESQL.equals(type.name()) - || Constants.CLICKHOUSE.equals(type.name()) - || Constants.ORACLE.equals(type.name())) { - separator = "&"; - } else if (Constants.HIVE.equals(type.name()) - || Constants.SPARK.equals(type.name()) - || Constants.SQLSERVER.equals(type.name())) { - separator = ";"; - } - - Map parameterMap = new LinkedHashMap(6); - 
parameterMap.put(Constants.ADDRESS, address); - parameterMap.put(Constants.DATABASE, database); - parameterMap.put(Constants.JDBC_URL, jdbcUrl); - parameterMap.put(Constants.USER, userName); - parameterMap.put(Constants.PASSWORD, password); - if (CommonUtils.getKerberosStartupState() && - (type == DbType.HIVE || type == DbType.SPARK)){ - parameterMap.put(Constants.PRINCIPAL,principal); - } - if (other != null && !"".equals(other)) { - Map map = JSONObject.parseObject(other, new TypeReference>() { - }); - if (map.size() > 0) { - Set keys = map.keySet(); - StringBuilder otherSb = new StringBuilder(); - for (String key : keys) { - otherSb.append(String.format("%s=%s%s", key, map.get(key), separator)); - - } - otherSb.deleteCharAt(otherSb.length() - 1); - parameterMap.put(Constants.OTHER, otherSb); - } - - } - - if(logger.isDebugEnabled()){ - logger.info("parameters map-----" + JSONObject.toJSONString(parameterMap)); - } - return JSONObject.toJSONString(parameterMap); - - - } - - private String buildAddress(DbType type, String host, String port) { - StringBuilder sb = new StringBuilder(); - if (Constants.MYSQL.equals(type.name())) { - sb.append(Constants.JDBC_MYSQL); - sb.append(host).append(":").append(port); - } else if (Constants.POSTGRESQL.equals(type.name())) { - sb.append(Constants.JDBC_POSTGRESQL); - sb.append(host).append(":").append(port); - } else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) { - sb.append(Constants.JDBC_HIVE_2); - String[] hostArray = host.split(","); - if (hostArray.length > 0) { - for (String zkHost : hostArray) { - sb.append(String.format("%s:%s,", zkHost, port)); - } - sb.deleteCharAt(sb.length() - 1); - } - } else if (Constants.CLICKHOUSE.equals(type.name())) { - sb.append(Constants.JDBC_CLICKHOUSE); - sb.append(host).append(":").append(port); - } else if (Constants.ORACLE.equals(type.name())) { - sb.append(Constants.JDBC_ORACLE); - sb.append(host).append(":").append(port); - } else if 
(Constants.SQLSERVER.equals(type.name())) { - sb.append(Constants.JDBC_SQLSERVER); - sb.append(host).append(":").append(port); - } - - return sb.toString(); - } - - /** - * delete datasource - * - * @param loginUser - * @param datasourceId - * @return - */ - @Transactional(value = "TransactionManager",rollbackFor = Exception.class) - public Result delete(User loginUser, int datasourceId) { - Result result = new Result(); - try { - //query datasource by id - DataSource dataSource = dataSourceMapper.selectById(datasourceId); - if(dataSource == null){ - logger.error("resource id {} not exist", datasourceId); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - if(loginUser.getId() != dataSource.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER){ - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - dataSourceMapper.deleteById(datasourceId); - datasourceUserMapper.deleteByDatasourceId(datasourceId); - putMsg(result, Status.SUCCESS); - } catch (Exception e) { - logger.error("delete datasource fail",e); - throw new RuntimeException("delete datasource fail"); - } - return result; - } - - /** - * unauthorized datasource - * - * @param loginUser - * @param userId - * @return - */ - public Map unauthDatasource(User loginUser, Integer userId) { - - Map result = new HashMap<>(); - //only admin operate - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - /** - * query all data sources except userId - */ - List resultList = new ArrayList<>(); - List datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId); - Set datasourceSet = null; - if (datasourceList != null && datasourceList.size() > 0) { - datasourceSet = new HashSet<>(datasourceList); - - List authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId); - - Set authedDataSourceSet = null; - if (authedDataSourceList != null && authedDataSourceList.size() > 0) { - authedDataSourceSet = new 
HashSet<>(authedDataSourceList); - datasourceSet.removeAll(authedDataSourceSet); - - } - resultList = new ArrayList<>(datasourceSet); - } - result.put(Constants.DATA_LIST, resultList); - putMsg(result, Status.SUCCESS); - return result; - } - - - /** - * authorized datasource - * - * @param loginUser - * @param userId - * @return - */ - public Map authedDatasource(User loginUser, Integer userId) { - Map result = new HashMap<>(5); - - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - List authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId); - result.put(Constants.DATA_LIST, authedDatasourceList); - putMsg(result, Status.SUCCESS); - return result; - } - - - /** - * get host and port by address - * - * @param address - * @return - */ - private String[] getHostsAndPort(String address) { - String[] result = new String[2]; - String[] tmpArray = address.split(cn.escheduler.common.Constants.DOUBLE_SLASH); - String hostsAndPorts = tmpArray[tmpArray.length - 1]; - StringBuilder hosts = new StringBuilder(); - String[] hostPortArray = hostsAndPorts.split(cn.escheduler.common.Constants.COMMA); - String port = hostPortArray[0].split(cn.escheduler.common.Constants.COLON)[1]; - for (String hostPort : hostPortArray) { - hosts.append(hostPort.split(cn.escheduler.common.Constants.COLON)[0]).append(cn.escheduler.common.Constants.COMMA); - } - hosts.deleteCharAt(hosts.length() - 1); - result[0] = hosts.toString(); - result[1] = port; - return result; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/ExecutorService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/ExecutorService.java deleted file mode 100644 index c31bfa3152..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/ExecutorService.java +++ /dev/null @@ -1,540 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - - -import cn.escheduler.api.enums.ExecuteType; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.enums.*; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.*; -import cn.escheduler.dao.mapper.ProcessDefinitionMapper; -import cn.escheduler.dao.mapper.ProcessInstanceMapper; -import cn.escheduler.dao.mapper.ProjectMapper; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.text.ParseException; -import java.util.*; - -import static cn.escheduler.common.Constants.*; - -/** - * executor service - */ -@Service -public class ExecutorService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(ExecutorService.class); - - @Autowired - private ProjectMapper projectMapper; - - @Autowired - private ProjectService projectService; - - @Autowired - private ProcessDefinitionMapper processDefinitionMapper; - - @Autowired - private ProcessDefinitionService 
processDefinitionService; - - - @Autowired - private ProcessInstanceMapper processInstanceMapper; - - - @Autowired - private ProcessDao processDao; - - /** - * execute process instance - * - * @param loginUser login user - * @param projectName project name - * @param processDefinitionId process Definition Id - * @param cronTime cron time - * @param commandType command type - * @param failureStrategy failuer strategy - * @param startNodeList start nodelist - * @param taskDependType node dependency type - * @param warningType warning type - * @param warningGroupId notify group id - * @param receivers receivers - * @param receiversCc receivers cc - * @param timeout timeout - * @return - */ - public Map execProcessInstance(User loginUser, String projectName, - int processDefinitionId, String cronTime, CommandType commandType, - FailureStrategy failureStrategy, String startNodeList, - TaskDependType taskDependType, WarningType warningType, int warningGroupId, - String receivers, String receiversCc, RunMode runMode, - Priority processInstancePriority, int workerGroupId, Integer timeout) throws ParseException { - Map result = new HashMap<>(5); - // timeout is valid - if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { - putMsg(result,Status.TASK_TIMEOUT_PARAMS_ERROR); - return result; - } - Project project = projectMapper.queryByName(projectName); - Map checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project); - if (checkResultAndAuth != null){ - return checkResultAndAuth; - } - - // check process define release state - ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId); - result = checkProcessDefinitionValid(processDefinition, processDefinitionId); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return result; - } - - if (!checkTenantSuitable(processDefinition)){ - logger.error("there is not any vaild tenant for the process definition: id:{},name:{}, ", - processDefinition.getId(), 
processDefinition.getName()); - putMsg(result, Status.TENANT_NOT_SUITABLE); - return result; - } - - /** - * create command - */ - int create = this.createCommand(commandType, processDefinitionId, - taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), - warningGroupId, runMode,processInstancePriority, workerGroupId); - if(create > 0 ){ - /** - * according to the process definition ID updateProcessInstance and CC recipient - */ - processDefinition.setReceivers(receivers); - processDefinition.setReceiversCc(receiversCc); - processDefinitionMapper.updateById(processDefinition); - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); - } - return result; - } - - - - /** - * check whether the process definition can be executed - * - * @param processDefinition - * @param processDefineId - * @return - */ - public Map checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId){ - Map result = new HashMap<>(5); - if (processDefinition == null) { - // check process definition exists - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,processDefineId); - } else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { - // check process definition online - putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE,processDefineId); - }else{ - result.put(Constants.STATUS, Status.SUCCESS); - } - return result; - } - - - - /** - * do action to process instance:pause, stop, repeat, recover from pause, recover from stop - * - * @param loginUser - * @param projectName - * @param processInstanceId - * @param executeType - * @return - */ - public Map execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) { - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = checkResultAndAuth(loginUser, projectName, project); - if (checkResult != null) { - return checkResult; - } - - 
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); - if (processInstance == null) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); - return result; - } - - ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); - if(executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE){ - result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId()); - if (result.get(Constants.STATUS) != Status.SUCCESS) { - return result; - } - } - - checkResult = checkExecuteType(processInstance, executeType); - Status status = (Status) checkResult.get(Constants.STATUS); - if (status != Status.SUCCESS) { - return checkResult; - } - if (!checkTenantSuitable(processDefinition)){ - logger.error("there is not any vaild tenant for the process definition: id:{},name:{}, ", - processDefinition.getId(), processDefinition.getName()); - putMsg(result, Status.TENANT_NOT_SUITABLE); - } - - switch (executeType) { - case REPEAT_RUNNING: - result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING); - break; - case RECOVER_SUSPENDED_PROCESS: - result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS); - break; - case START_FAILURE_TASK_PROCESS: - result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS); - break; - case STOP: - if (processInstance.getState() == ExecutionStatus.READY_STOP) { - putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); - } else { - processInstance.setCommandType(CommandType.STOP); - processInstance.addHistoryCmd(CommandType.STOP); - processDao.updateProcessInstance(processInstance); - result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_STOP); - } - break; - 
case PAUSE: - if (processInstance.getState() == ExecutionStatus.READY_PAUSE) { - putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); - } else { - processInstance.setCommandType(CommandType.PAUSE); - processInstance.addHistoryCmd(CommandType.PAUSE); - processDao.updateProcessInstance(processInstance); - result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_PAUSE); - } - break; - default: - logger.error(String.format("unknown execute type : %s", executeType.toString())); - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type"); - - break; - } - return result; - } - - /** - * check tenant suitable - * @param processDefinition - * @return - */ - private boolean checkTenantSuitable(ProcessDefinition processDefinition) { - // checkTenantExists(); - Tenant tenant = processDao.getTenantForProcess(processDefinition.getTenantId(), - processDefinition.getUserId()); - if(tenant == null){ - return false; - } - return true; - } - - /** - * Check the state of process instance and the type of operation match - * - * @param processInstance - * @param executeType - * @return - */ - private Map checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) { - - Map result = new HashMap<>(5); - ExecutionStatus executionStatus = processInstance.getState(); - boolean checkResult = false; - switch (executeType) { - case PAUSE: - case STOP: - if (executionStatus.typeIsRunning()) { - checkResult = true; - } - break; - case REPEAT_RUNNING: - if (executionStatus.typeIsFinished()) { - checkResult = true; - } - break; - case START_FAILURE_TASK_PROCESS: - if (executionStatus.typeIsFailure()) { - checkResult = true; - } - break; - case RECOVER_SUSPENDED_PROCESS: - if (executionStatus.typeIsPause()|| executionStatus.typeIsCancel()) { - checkResult = true; - } - default: - break; - } - if (!checkResult) { - putMsg(result,Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, 
processInstance.getName(), executionStatus.toString(), executeType.toString()); - } else { - putMsg(result, Status.SUCCESS); - } - return result; - } - - /** - * update process instance state - * - * @param processInstanceId - * @param executionStatus - * @return - */ - private Map updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { - Map result = new HashMap<>(5); - - int update = processDao.updateProcessInstanceState(processInstanceId, executionStatus); - if (update > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); - } - - return result; - } - - /** - * insert command, used in the implementation of the page, re run, recovery (pause / failure) execution - * - * @param loginUser - * @param instanceId - * @param processDefinitionId - * @param commandType - * @return - */ - private Map insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) { - Map result = new HashMap<>(5); - Command command = new Command(); - command.setCommandType(commandType); - command.setProcessDefinitionId(processDefinitionId); - command.setCommandParam(String.format("{\"%s\":%d}", - CMDPARAM_RECOVER_PROCESS_ID_STRING, instanceId)); - command.setExecutorId(loginUser.getId()); - - if(!processDao.verifyIsNeedCreateCommand(command)){ - putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND,processDefinitionId); - return result; - } - - int create = processDao.createCommand(command); - - if (create > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); - } - - return result; - } - - /** - * check if subprocesses are offline before starting process definition - * @param processDefineId - * @return - */ - public Map startCheckByProcessDefinedId(int processDefineId) { - Map result = new HashMap(); - - if (processDefineId == 0){ - logger.error("process definition id is null"); - 
putMsg(result,Status.REQUEST_PARAMS_NOT_VALID_ERROR,"process definition id"); - } - List ids = new ArrayList<>(); - processDao.recurseFindSubProcessId(processDefineId, ids); - Integer[] idArray = ids.toArray(new Integer[ids.size()]); - if (ids.size() > 0){ - List processDefinitionList; - processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); - if (processDefinitionList != null && processDefinitionList.size() > 0){ - for (ProcessDefinition processDefinition : processDefinitionList){ - /** - * if there is no online process, exit directly - */ - if (processDefinition.getReleaseState() != ReleaseState.ONLINE){ - putMsg(result,Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); - logger.info("not release process definition id: {} , name : {}", - processDefinition.getId(), processDefinition.getName()); - return result; - } - } - } - } - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * query recipients and copyers by process definition id or processInstanceId - * - * @param processDefineId - * @return - */ - public Map getReceiverCc(Integer processDefineId,Integer processInstanceId) { - Map result = new HashMap<>(); - logger.info("processInstanceId {}",processInstanceId); - if(processDefineId == null && processInstanceId == null){ - throw new RuntimeException("You must set values for parameters processDefineId or processInstanceId"); - } - if(processDefineId == null && processInstanceId != null) { - ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId); - if (processInstance == null) { - throw new RuntimeException("processInstanceId is not exists"); - } - processDefineId = processInstance.getProcessDefinitionId(); - } - ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefineId); - if (processDefinition == null){ - throw new RuntimeException(String.format("processDefineId %d is not exists",processDefineId)); - } - - String receivers = 
processDefinition.getReceivers(); - String receiversCc = processDefinition.getReceiversCc(); - Map dataMap = new HashMap<>(); - dataMap.put(Constants.RECEIVERS,receivers); - dataMap.put(Constants.RECEIVERS_CC,receiversCc); - - result.put(Constants.DATA_LIST, dataMap); - putMsg(result, Status.SUCCESS); - return result; - } - - - /** - * create command - * - * @param commandType - * @param processDefineId - * @param nodeDep - * @param failureStrategy - * @param startNodeList - * @param schedule - * @param warningType - * @param excutorId - * @param warningGroupId - * @param runMode - * @return - * @throws ParseException - */ - private int createCommand(CommandType commandType, int processDefineId, - TaskDependType nodeDep, FailureStrategy failureStrategy, - String startNodeList, String schedule, WarningType warningType, - int excutorId, int warningGroupId, - RunMode runMode,Priority processInstancePriority, int workerGroupId) throws ParseException { - - /** - * instantiate command schedule instance - */ - Command command = new Command(); - - Map cmdParam = new HashMap<>(); - if(commandType == null){ - command.setCommandType(CommandType.START_PROCESS); - }else{ - command.setCommandType(commandType); - } - command.setProcessDefinitionId(processDefineId); - if(nodeDep != null){ - command.setTaskDependType(nodeDep); - } - if(failureStrategy != null){ - command.setFailureStrategy(failureStrategy); - } - - if(StringUtils.isNotEmpty(startNodeList)){ - cmdParam.put(CMDPARAM_START_NODE_NAMES, startNodeList); - } - if(warningType != null){ - command.setWarningType(warningType); - } - command.setCommandParam(JSONUtils.toJson(cmdParam)); - command.setExecutorId(excutorId); - command.setWarningGroupId(warningGroupId); - command.setProcessInstancePriority(processInstancePriority); - command.setWorkerGroupId(workerGroupId); - - Date start = null; - Date end = null; - if(StringUtils.isNotEmpty(schedule)){ - String[] interval = schedule.split(","); - if(interval.length == 2){ - start 
= DateUtils.getScheduleDate(interval[0]); - end = DateUtils.getScheduleDate(interval[1]); - } - } - - if(commandType == CommandType.COMPLEMENT_DATA){ - runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode; - if(runMode == RunMode.RUN_MODE_SERIAL){ - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end)); - command.setCommandParam(JSONUtils.toJson(cmdParam)); - return processDao.createCommand(command); - }else if (runMode == RunMode.RUN_MODE_PARALLEL){ - int runCunt = 0; - while(!start.after(end)){ - runCunt += 1; - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start)); - command.setCommandParam(JSONUtils.toJson(cmdParam)); - processDao.createCommand(command); - start = DateUtils.getSomeDay(start, 1); - } - return runCunt; - } - }else{ - command.setCommandParam(JSONUtils.toJson(cmdParam)); - return processDao.createCommand(command); - } - - return 0; - } - - /** - * check result and auth - * - * @param loginUser - * @param projectName - * @param project - * @return - */ - private Map checkResultAndAuth(User loginUser, String projectName, Project project) { - // check project auth - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status status = (Status) checkResult.get(Constants.STATUS); - if (status != Status.SUCCESS) { - return checkResult; - } - return null; - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/LoggerService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/LoggerService.java deleted file mode 100644 index ec810f12f4..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/LoggerService.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.log.LogClient; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.Constants; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.TaskInstance; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -/** - * log service - */ -@Service -public class LoggerService { - - private static final Logger logger = LoggerFactory.getLogger(LoggerService.class); - - @Autowired - private ProcessDao processDao; - - /** - * view log - * - * @param taskInstId - * @param skipLineNum - * @param limit - * @return - */ - public Result queryLog(int taskInstId, int skipLineNum, int limit) { - - TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); - - if (taskInstance == null){ - return new Result(Status.TASK_INSTANCE_NOT_FOUND.getCode(), Status.TASK_INSTANCE_NOT_FOUND.getMsg()); - } - - String host = taskInstance.getHost(); - if(StringUtils.isEmpty(host)){ - return new Result(Status.TASK_INSTANCE_NOT_FOUND.getCode(), Status.TASK_INSTANCE_NOT_FOUND.getMsg()); - } - - - Result result 
= new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg()); - - logger.info("log host : {} , logPath : {} , logServer port : {}",host,taskInstance.getLogPath(),Constants.RPC_PORT); - - LogClient logClient = new LogClient(host, Constants.RPC_PORT); - String log = logClient.rollViewLog(taskInstance.getLogPath(),skipLineNum,limit); - result.setData(log); - logger.info(log); - - return result; - } - - /** - * get log size - * - * @param taskInstId - * @return - */ - public byte[] getLogBytes(int taskInstId) { - TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); - if (taskInstance == null){ - throw new RuntimeException("task instance is null"); - } - String host = taskInstance.getHost(); - LogClient logClient = new LogClient(host, Constants.RPC_PORT); - return logClient.getLogBytes(taskInstance.getLogPath()); - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/MonitorService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/MonitorService.java deleted file mode 100644 index 8c3d189371..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/MonitorService.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.ZookeeperMonitor; -import cn.escheduler.common.enums.ZKNodeType; -import cn.escheduler.dao.MonitorDBDao; -import cn.escheduler.common.model.MasterServer; -import cn.escheduler.dao.entity.MonitorRecord; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.entity.ZookeeperRecord; -import org.springframework.stereotype.Service; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * monitor service - */ -@Service -public class MonitorService extends BaseService{ - - /** - * query database state - * - * @return - */ - public Map queryDatabaseState(User loginUser) { - Map result = new HashMap<>(5); - - List monitorRecordList = MonitorDBDao.queryDatabaseState(); - - result.put(Constants.DATA_LIST, monitorRecordList); - putMsg(result, Status.SUCCESS); - - return result; - - } - - /** - * query master list - * - * @param loginUser - * @return - */ - public Map queryMaster(User loginUser) { - - Map result = new HashMap<>(5); - - List masterServers = getServerListFromZK(true); - result.put(Constants.DATA_LIST, masterServers); - putMsg(result,Status.SUCCESS); - - return result; - } - - /** - * query zookeeper state - * - * @return - */ - public Map queryZookeeperState(User loginUser) { - Map result = new HashMap<>(5); - - List zookeeperRecordList = ZookeeperMonitor.zookeeperInfoList(); - - result.put(Constants.DATA_LIST, zookeeperRecordList); - putMsg(result, Status.SUCCESS); - - return result; - - } - - - /** - * query master list - * - * @param loginUser - * @return - */ - public Map queryWorker(User loginUser) { - - Map result = new HashMap<>(5); - List workerServers = getServerListFromZK(false); - - result.put(Constants.DATA_LIST, workerServers); - putMsg(result,Status.SUCCESS); - - return result; - } - - public List getServerListFromZK(boolean 
isMaster){ - List servers = new ArrayList<>(); - ZookeeperMonitor zookeeperMonitor = null; - try{ - zookeeperMonitor = new ZookeeperMonitor(); - ZKNodeType zkNodeType = isMaster ? ZKNodeType.MASTER : ZKNodeType.WORKER; - servers = zookeeperMonitor.getServersList(zkNodeType); - }catch (Exception e){ - throw e; - }finally { - if(zookeeperMonitor != null){ - zookeeperMonitor.close(); - } - } - return servers; - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java deleted file mode 100644 index bfde9a0cf4..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java +++ /dev/null @@ -1,1123 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.dto.treeview.Instance; -import cn.escheduler.api.dto.treeview.TreeViewDto; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.common.enums.*; -import cn.escheduler.common.graph.DAG; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.model.TaskNodeRelation; -import cn.escheduler.common.process.ProcessDag; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.*; -import cn.escheduler.dao.mapper.*; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONArray; -import com.alibaba.fastjson.JSONObject; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import com.fasterxml.jackson.core.JsonProcessingException; -import org.apache.commons.lang3.ObjectUtils; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.MediaType; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.web.multipart.MultipartFile; - -import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletResponse; -import java.io.BufferedOutputStream; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; - -import static cn.escheduler.api.enums.Status.UPDATE_PROCESS_DEFINITION_ERROR; -import static 
cn.escheduler.api.service.SchedulerService.deleteSchedule; -import static cn.escheduler.api.utils.CheckUtils.checkOtherParams; -import static cn.escheduler.api.utils.CheckUtils.checkTaskNodeParameters; -import static cn.escheduler.common.Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID; - -/** - * process definition service - */ -@Service -public class ProcessDefinitionService extends BaseDAGService { - - private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionService.class); - - - @Autowired - private ProjectMapper projectMapper; - - @Autowired - private ProjectService projectService; - - @Autowired - private ProcessDefinitionMapper processDefineMapper; - - @Autowired - private ProcessInstanceMapper processInstanceMapper; - - - @Autowired - private TaskInstanceMapper taskInstanceMapper; - - @Autowired - private ScheduleMapper scheduleMapper; - - @Autowired - private ProcessDao processDao; - - @Autowired - private DataSourceMapper dataSourceMapper; - - @Autowired - private WorkerGroupMapper workerGroupMapper; - - /** - * create process definition - * - * @param loginUser - * @param projectName - * @param name - * @param processDefinitionJson - * @param desc - * @param locations - * @param connects - * @return - */ - public Map createProcessDefinition(User loginUser, String projectName, String name, - String processDefinitionJson, String desc, String locations, String connects) throws JsonProcessingException { - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - // check project auth - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } - - ProcessDefinition processDefine = new ProcessDefinition(); - Date now = new Date(); - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - Map checkProcessJson = 
checkProcessNodeList(processData, processDefinitionJson); - if (checkProcessJson.get(Constants.STATUS) != Status.SUCCESS) { - return checkProcessJson; - } - - processDefine.setName(name); - processDefine.setReleaseState(ReleaseState.OFFLINE); - processDefine.setProjectId(project.getId()); - processDefine.setUserId(loginUser.getId()); - processDefine.setProcessDefinitionJson(processDefinitionJson); - processDefine.setDescription(desc); - processDefine.setLocations(locations); - processDefine.setConnects(connects); - processDefine.setTimeout(processData.getTimeout()); - processDefine.setTenantId(processData.getTenantId()); - - //custom global params - List globalParamsList = processData.getGlobalParams(); - if (globalParamsList != null && globalParamsList.size() > 0) { - Set globalParamsSet = new HashSet<>(globalParamsList); - globalParamsList = new ArrayList<>(globalParamsSet); - processDefine.setGlobalParamList(globalParamsList); - } - processDefine.setCreateTime(now); - processDefine.setUpdateTime(now); - processDefine.setFlag(Flag.YES); - processDefineMapper.insert(processDefine); - putMsg(result, Status.SUCCESS); - result.put("processDefinitionId",processDefine.getId()); - return result; - } - - - /** - * query proccess definition list - * - * @param loginUser - * @param projectName - * @return - */ - public Map queryProccessDefinitionList(User loginUser, String projectName) { - - HashMap result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } - - List resourceList = processDefineMapper.queryAllDefinitionList(project.getId()); - result.put(Constants.DATA_LIST, resourceList); - putMsg(result, Status.SUCCESS); - - return result; - } - - - /** - * query proccess definition list paging - * - * @param loginUser - * 
@param projectName - * @param searchVal - * @param pageNo - * @param pageSize - * @param userId - * @return - */ - public Map queryProcessDefinitionListPaging(User loginUser, String projectName, String searchVal, Integer pageNo, Integer pageSize, Integer userId) { - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } - - Page page = new Page(pageNo, pageSize); - IPage processDefinitionIPage = processDefineMapper.queryDefineListPaging( - page, searchVal, userId, project.getId()); - - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - pageInfo.setTotalCount((int)processDefinitionIPage.getTotal()); - pageInfo.setLists(processDefinitionIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * query datail of process definition - * - * @param loginUser - * @param projectName - * @param processId - * @return - */ - public Map queryProccessDefinitionById(User loginUser, String projectName, Integer processId) { - - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } - - ProcessDefinition processDefinition = processDefineMapper.selectById(processId); - if (processDefinition == null) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processId); - } else { - result.put(Constants.DATA_LIST, processDefinition); - putMsg(result, Status.SUCCESS); - } - return result; - } - - /** - * update process definition - * - * @param loginUser - * @param projectName - * @param id - * 
@param name - * @param processDefinitionJson - * @param desc - * @param locations - * @param connects - * @return - */ - public Map updateProcessDefinition(User loginUser, String projectName, int id, String name, - String processDefinitionJson, String desc, - String locations, String connects) { - Map result = new HashMap<>(5); - - Project project = projectMapper.queryByName(projectName); - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - Map checkProcessJson = checkProcessNodeList(processData, processDefinitionJson); - if ((checkProcessJson.get(Constants.STATUS) != Status.SUCCESS)) { - return checkProcessJson; - } - ProcessDefinition processDefinition = processDao.findProcessDefineById(id); - if (processDefinition == null) { - // check process definition exists - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id); - return result; - } else if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { - // online can not permit edit - putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName()); - return result; - } else { - putMsg(result, Status.SUCCESS); - } - - ProcessDefinition processDefine = processDao.findProcessDefineById(id); - Date now = new Date(); - - processDefine.setId(id); - processDefine.setName(name); - processDefine.setReleaseState(ReleaseState.OFFLINE); - processDefine.setProjectId(project.getId()); - processDefine.setProcessDefinitionJson(processDefinitionJson); - processDefine.setDescription(desc); - processDefine.setLocations(locations); - processDefine.setConnects(connects); - processDefine.setTimeout(processData.getTimeout()); - processDefine.setTenantId(processData.getTenantId()); - - //custom global params - List globalParamsList = new 
ArrayList<>(); - if (processData.getGlobalParams() != null && processData.getGlobalParams().size() > 0) { - Set userDefParamsSet = new HashSet<>(processData.getGlobalParams()); - globalParamsList = new ArrayList<>(userDefParamsSet); - } - processDefine.setGlobalParamList(globalParamsList); - processDefine.setUpdateTime(now); - processDefine.setFlag(Flag.YES); - if (processDefineMapper.updateById(processDefine) > 0) { - putMsg(result, Status.SUCCESS); - - } else { - putMsg(result, UPDATE_PROCESS_DEFINITION_ERROR); - } - return result; - } - - /** - * verify process definition name unique - * - * @param loginUser - * @param projectName - * @param name - * @return - */ - public Map verifyProccessDefinitionName(User loginUser, String projectName, String name) { - - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(project.getId(), name); - if (processDefinition == null) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.PROCESS_INSTANCE_EXIST, name); - } - return result; - } - - /** - * delete process definition by id - * - * @param loginUser - * @param projectName - * @param processDefinitionId - * @return - */ - @Transactional(value = "TransactionManager", rollbackFor = Exception.class) - public Map deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) { - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - 
ProcessDefinition processDefinition = processDefineMapper.selectById(processDefinitionId); - - if (processDefinition == null) { - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionId); - return result; - } - - // Determine if the login user is the owner of the process definition - if (loginUser.getId() != processDefinition.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - // check process definition is already online - if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { - putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE,processDefinitionId); - return result; - } - - // get the timing according to the process definition - List schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId); - if (!schedules.isEmpty() && schedules.size() > 1) { - logger.warn("scheduler num is {},Greater than 1",schedules.size()); - putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR); - return result; - }else if(schedules.size() == 1){ - Schedule schedule = schedules.get(0); - if(schedule.getReleaseState() == ReleaseState.OFFLINE){ - scheduleMapper.deleteById(schedule.getId()); - }else if(schedule.getReleaseState() == ReleaseState.ONLINE){ - putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE,schedule.getId()); - return result; - } - } - - int delete = processDefineMapper.deleteById(processDefinitionId); - - if (delete > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR); - } - return result; - } - - /** - * batch delete process definition by ids - * - * @param loginUser - * @param projectName - * @param processDefinitionIds - * @return - */ - public Map batchDeleteProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds) { - - Map result = new HashMap<>(5); - - Map deleteReuslt = new HashMap<>(5); - - List deleteFailedIdList = new ArrayList(); - Project project = 
projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - - if(StringUtils.isNotEmpty(processDefinitionIds)){ - String[] processInstanceIdArray = processDefinitionIds.split(","); - - for (String strProcessInstanceId:processInstanceIdArray) { - int processInstanceId = Integer.parseInt(strProcessInstanceId); - try { - deleteReuslt = deleteProcessDefinitionById(loginUser, projectName, processInstanceId); - if(!Status.SUCCESS.equals(deleteReuslt.get(Constants.STATUS))){ - deleteFailedIdList.add(processInstanceId); - logger.error((String)deleteReuslt.get(Constants.MSG)); - } - } catch (Exception e) { - deleteFailedIdList.add(processInstanceId); - } - } - } - - if(deleteFailedIdList.size() > 0){ - putMsg(result, Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR,StringUtils.join(deleteFailedIdList.toArray(),",")); - }else{ - putMsg(result, Status.SUCCESS); - } - return result; - } - - /** - * release process definition: online / offline - * - * @param loginUser - * @param projectName - * @param id - * @param releaseState - * @return - */ - @Transactional(value = "TransactionManager", rollbackFor = Exception.class) - public Map releaseProcessDefinition(User loginUser, String projectName, int id, int releaseState) { - HashMap result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - ReleaseState state = ReleaseState.getEnum(releaseState); - ProcessDefinition processDefinition = processDefineMapper.selectById(id); - - switch (state) { - case ONLINE: { - processDefinition.setReleaseState(state); - 
processDefineMapper.updateById(processDefinition); - break; - } - case OFFLINE: { - processDefinition.setReleaseState(state); - processDefineMapper.updateById(processDefinition); - List scheduleList = scheduleMapper.selectAllByProcessDefineArray( - new int[]{processDefinition.getId()} - ); - - for(Schedule schedule:scheduleList){ - logger.info("set schedule offline, schedule id: {}, process definition id: {}", project.getId(), schedule.getId(), id); - // set status - schedule.setReleaseState(ReleaseState.OFFLINE); - scheduleMapper.updateById(schedule); - deleteSchedule(project.getId(), schedule.getId()); - } - break; - } - default: { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "releaseState"); - return result; - } - } - - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * export process definition by id - * - * @param loginUser - * @param projectName - * @param processDefinitionId - * @return - */ - public void exportProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId, HttpServletResponse response) { - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus == Status.SUCCESS) { - ProcessDefinition processDefinition = processDefineMapper.selectById(processDefinitionId); - if (processDefinition != null) { - JSONObject jsonObject = JSONUtils.parseObject(processDefinition.getProcessDefinitionJson()); - JSONArray jsonArray = (JSONArray) jsonObject.get("tasks"); - for (int i = 0; i < jsonArray.size(); i++) { - JSONObject taskNode = jsonArray.getJSONObject(i); - if (taskNode.get("type") != null && taskNode.get("type") != "") { - String taskType = taskNode.getString("type"); - if(taskType.equals(TaskType.SQL.name()) || taskType.equals(TaskType.PROCEDURE.name())){ - JSONObject sqlParameters = 
JSONUtils.parseObject(taskNode.getString("params")); - DataSource dataSource = dataSourceMapper.selectById((Integer) sqlParameters.get("datasource")); - if (dataSource != null) { - sqlParameters.put("datasourceName", dataSource.getName()); - } - taskNode.put("params", sqlParameters); - } - } - } - jsonObject.put("tasks", jsonArray); - processDefinition.setProcessDefinitionJson(jsonObject.toString()); - - Map row = new LinkedHashMap<>(); - row.put("projectName", processDefinition.getProjectName()); - row.put("processDefinitionName", processDefinition.getName()); - row.put("processDefinitionJson", processDefinition.getProcessDefinitionJson()); - row.put("processDefinitionDesc", processDefinition.getDescription()); - row.put("processDefinitionLocations", processDefinition.getLocations()); - row.put("processDefinitionConnects", processDefinition.getConnects()); - - List schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId); - if (schedules.size() > 0) { - Schedule schedule = schedules.get(0); - row.put("scheduleWarningType", schedule.getWarningType()); - row.put("scheduleWarningGroupId", schedule.getWarningGroupId()); - row.put("scheduleStartTime", schedule.getStartTime()); - row.put("scheduleEndTime", schedule.getEndTime()); - row.put("scheduleCrontab", schedule.getCrontab()); - row.put("scheduleFailureStrategy", schedule.getFailureStrategy()); - row.put("scheduleReleaseState", schedule.getReleaseState()); - row.put("scheduleProcessInstancePriority", schedule.getProcessInstancePriority()); - if(schedule.getId() == -1){ - row.put("scheduleWorkerGroupId", -1); - }else{ - WorkerGroup workerGroup = workerGroupMapper.selectById(schedule.getWorkerGroupId()); - if(workerGroup != null){ - row.put("scheduleWorkerGroupName", workerGroup.getName()); - } - } - - } - String rowsJson = JSONUtils.toJsonString(row); - response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE); - response.setHeader("Content-Disposition", 
"attachment;filename="+processDefinition.getName()+".json"); - BufferedOutputStream buff = null; - ServletOutputStream out = null; - try { - out = response.getOutputStream(); - buff = new BufferedOutputStream(out); - buff.write(rowsJson.getBytes("UTF-8")); - buff.flush(); - buff.close(); - } catch (IOException e) { - e.printStackTrace(); - }finally { - try { - buff.close(); - out.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } - } - } - } - - @Transactional(value = "TransactionManager", rollbackFor = Exception.class) - public Map importProcessDefinition(User loginUser, MultipartFile file) { - Map result = new HashMap<>(5); - - JSONObject json = null; - try(InputStreamReader inputStreamReader = new InputStreamReader( file.getInputStream(), "UTF-8" )) { - BufferedReader streamReader = new BufferedReader(inputStreamReader); - StringBuilder respomseStrBuilder = new StringBuilder(); - String inputStr = ""; - while ((inputStr = streamReader.readLine())!= null){ - respomseStrBuilder.append( inputStr ); - } - json = JSONObject.parseObject( respomseStrBuilder.toString() ); - if(json != null){ - String projectName = null; - String processDefinitionName = null; - String processDefinitionJson = null; - String processDefinitionDesc = null; - String processDefinitionLocations = null; - String processDefinitionConnects = null; - - String scheduleWarningType = null; - String scheduleWarningGroupId = null; - String scheduleStartTime = null; - String scheduleEndTime = null; - String scheduleCrontab = null; - String scheduleFailureStrategy = null; - String scheduleReleaseState = null; - String scheduleProcessInstancePriority = null; - String scheduleWorkerGroupId = null; - String scheduleWorkerGroupName = null; - - if (ObjectUtils.allNotNull(json.get("projectName"))) { - projectName = json.get("projectName").toString(); - } else { - putMsg(result, Status.DATA_IS_NULL, "processDefinitionName"); - } - if (ObjectUtils.allNotNull(json.get("processDefinitionName"))) { - 
processDefinitionName = json.get("processDefinitionName").toString(); - } else { - putMsg(result, Status.DATA_IS_NULL, "processDefinitionName"); - } - if (ObjectUtils.allNotNull(json.get("processDefinitionJson"))) { - processDefinitionJson = json.get("processDefinitionJson").toString(); - } else { - putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson"); - } - if (ObjectUtils.allNotNull(json.get("processDefinitionDesc"))) { - processDefinitionDesc = json.get("processDefinitionDesc").toString(); - } - if (ObjectUtils.allNotNull(json.get("processDefinitionLocations"))) { - processDefinitionLocations = json.get("processDefinitionLocations").toString(); - } - if (ObjectUtils.allNotNull(json.get("processDefinitionConnects"))) { - processDefinitionConnects = json.get("processDefinitionConnects").toString(); - } - - JSONObject jsonObject = JSONUtils.parseObject(processDefinitionJson); - JSONArray jsonArray = (JSONArray) jsonObject.get("tasks"); - for (int j = 0; j < jsonArray.size(); j++) { - JSONObject taskNode = jsonArray.getJSONObject(j); - JSONObject sqlParameters = JSONUtils.parseObject(taskNode.getString("params")); - List dataSources = dataSourceMapper.queryDataSourceByName(sqlParameters.getString("datasourceName")); - if (dataSources.size() > 0) { - DataSource dataSource = dataSources.get(0); - sqlParameters.put("datasource", dataSource.getId()); - } - taskNode.put("params", sqlParameters); - } - jsonObject.put("tasks", jsonArray); - - Map createProcessDefinitionResult = createProcessDefinition(loginUser,projectName,processDefinitionName,jsonObject.toString(),processDefinitionDesc,processDefinitionLocations,processDefinitionConnects); - Integer processDefinitionId = null; - if (ObjectUtils.allNotNull(createProcessDefinitionResult.get("processDefinitionId"))) { - processDefinitionId = Integer.parseInt(createProcessDefinitionResult.get("processDefinitionId").toString()); - } - if (ObjectUtils.allNotNull(json.get("scheduleCrontab")) && processDefinitionId != 
null) { - Date now = new Date(); - Schedule scheduleObj = new Schedule(); - scheduleObj.setProjectName(projectName); - scheduleObj.setProcessDefinitionId(processDefinitionId); - scheduleObj.setProcessDefinitionName(processDefinitionName); - scheduleObj.setCreateTime(now); - scheduleObj.setUpdateTime(now); - scheduleObj.setUserId(loginUser.getId()); - scheduleObj.setUserName(loginUser.getUserName()); - - scheduleCrontab = json.get("scheduleCrontab").toString(); - scheduleObj.setCrontab(scheduleCrontab); - if (ObjectUtils.allNotNull(json.get("scheduleStartTime"))) { - scheduleStartTime = json.get("scheduleStartTime").toString(); - scheduleObj.setStartTime(DateUtils.stringToDate(scheduleStartTime)); - } - if (ObjectUtils.allNotNull(json.get("scheduleEndTime"))) { - scheduleEndTime = json.get("scheduleEndTime").toString(); - scheduleObj.setEndTime(DateUtils.stringToDate(scheduleEndTime)); - } - if (ObjectUtils.allNotNull(json.get("scheduleWarningType"))) { - scheduleWarningType = json.get("scheduleWarningType").toString(); - scheduleObj.setWarningType(WarningType.valueOf(scheduleWarningType)); - } - if (ObjectUtils.allNotNull(json.get("scheduleWarningGroupId"))) { - scheduleWarningGroupId = json.get("scheduleWarningGroupId").toString(); - scheduleObj.setWarningGroupId(Integer.parseInt(scheduleWarningGroupId)); - } - if (ObjectUtils.allNotNull(json.get("scheduleFailureStrategy"))) { - scheduleFailureStrategy = json.get("scheduleFailureStrategy").toString(); - scheduleObj.setFailureStrategy(FailureStrategy.valueOf(scheduleFailureStrategy)); - } - if (ObjectUtils.allNotNull(json.get("scheduleReleaseState"))) { - scheduleReleaseState = json.get("scheduleReleaseState").toString(); - scheduleObj.setReleaseState(ReleaseState.valueOf(scheduleReleaseState)); - } - if (ObjectUtils.allNotNull(json.get("scheduleProcessInstancePriority"))) { - scheduleProcessInstancePriority = json.get("scheduleProcessInstancePriority").toString(); - 
scheduleObj.setProcessInstancePriority(Priority.valueOf(scheduleProcessInstancePriority)); - } - if (ObjectUtils.allNotNull(json.get("scheduleWorkerGroupId"))) { - scheduleWorkerGroupId = json.get("scheduleWorkerGroupId").toString(); - if(scheduleWorkerGroupId != null){ - scheduleObj.setWorkerGroupId(Integer.parseInt(scheduleWorkerGroupId)); - }else{ - if (ObjectUtils.allNotNull(json.get("scheduleWorkerGroupName"))) { - scheduleWorkerGroupName = json.get("scheduleWorkerGroupName").toString(); - List workerGroups = workerGroupMapper.queryWorkerGroupByName(scheduleWorkerGroupName); - if(workerGroups.size() > 0){ - scheduleObj.setWorkerGroupId(workerGroups.get(0).getId()); - } - } - } - } - scheduleMapper.insert(scheduleObj); - } - }else{ - putMsg(result, Status.EXPORT_PROCESS_DEFINE_BY_ID_ERROR); - return result; - } - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - putMsg(result, Status.SUCCESS); - return result; - } - - - /** - * check the process definition node meets the specifications - * - * @param processData - * @param processDefinitionJson - * @return - */ - public Map checkProcessNodeList(ProcessData processData, String processDefinitionJson) { - - Map result = new HashMap<>(5); - try { - if (processData == null) { - logger.error("process data is null"); - putMsg(result,Status.DATA_IS_NOT_VALID, processDefinitionJson); - return result; - } - - // Check whether the task node is normal - List taskNodes = processData.getTasks(); - - if (taskNodes == null) { - logger.error("process node info is empty"); - putMsg(result, Status.DATA_IS_NULL, processDefinitionJson); - return result; - } - - // check has cycle - if (graphHasCycle(taskNodes)) { - logger.error("process DAG has cycle"); - putMsg(result, Status.PROCESS_NODE_HAS_CYCLE); - return result; - } - - // check whether the process definition json is normal - for (TaskNode taskNode : taskNodes) { - if (!checkTaskNodeParameters(taskNode.getParams(), taskNode.getType())) { - 
logger.error("task node {} parameter invalid", taskNode.getName()); - putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName()); - return result; - } - - // check extra params - checkOtherParams(taskNode.getExtras()); - } - putMsg(result,Status.SUCCESS); - } catch (Exception e) { - result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - result.put(Constants.MSG, e.getMessage()); - } - return result; - } - - /** - * get task node details based on process definition - */ - public Map getTaskNodeListByDefinitionId(Integer defineId) throws Exception { - Map result = new HashMap<>(); - - ProcessDefinition processDefinition = processDefineMapper.selectById(defineId); - if (processDefinition == null) { - logger.info("process define not exists"); - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinition.getId()); - return result; - } - - - String processDefinitionJson = processDefinition.getProcessDefinitionJson(); - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - - List taskNodeList = (processData.getTasks() == null) ? 
new ArrayList<>() : processData.getTasks();

    result.put(Constants.DATA_LIST, taskNodeList);
    putMsg(result, Status.SUCCESS);

    return result;

}

/**
 * get task node details for several process definitions at once
 *
 * @param defineIdList comma separated process definition ids
 * @return result map: definition id -> its task node list
 */
public Map getTaskNodeListByDefinitionIdList(String defineIdList) throws Exception {
    Map result = new HashMap<>();


    // NOTE(review): generic parameters appear stripped from this text (e.g.
    // "Map>"); likely Map<Integer, List<TaskNode>> originally - confirm.
    Map> taskNodeMap = new HashMap<>();
    String[] idList = defineIdList.split(",");
    List definitionIdList = Arrays.asList(idList);
    // NOTE(review): definitionIdList holds Strings here but is copied into an
    // Integer[]; as written that would throw ArrayStoreException at runtime -
    // confirm against the original generic declarations.
    List processDefinitionList = processDefineMapper.queryDefinitionListByIdList( definitionIdList.toArray(new Integer[definitionIdList.size()]));
    if (processDefinitionList == null || processDefinitionList.size() ==0) {
        logger.info("process definition not exists");
        putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineIdList);
        return result;
    }

    for(ProcessDefinition processDefinition : processDefinitionList){
        String processDefinitionJson = processDefinition.getProcessDefinitionJson();
        ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
        // a definition without tasks yields an empty list, never null
        List taskNodeList = (processData.getTasks() == null) ?
                new ArrayList<>() : processData.getTasks();
        taskNodeMap.put(processDefinition.getId(), taskNodeList);
    }

    result.put(Constants.DATA_LIST, taskNodeMap);
    putMsg(result, Status.SUCCESS);

    return result;

}


/**
 * query all process definitions by project id (no permission check; used
 * by internal callers)
 *
 * @param projectId project id
 * @return result map with the definition list under Constants.DATA_LIST
 */
public Map queryProccessDefinitionAllByProjectId(Integer projectId) {

    HashMap result = new HashMap<>(5);

    List resourceList = processDefineMapper.queryAllDefinitionList(projectId);
    result.put(Constants.DATA_LIST, resourceList);
    putMsg(result, Status.SUCCESS);

    return result;
}

/**
 * Encapsulates the TreeView structure
 *
 * @param processId process definition id
 * @param limit     max number of recent process instances to include
 * @return result map with the tree root under Constants.DATA_LIST
 */
public Map viewTree(Integer processId, Integer limit) throws Exception {
    Map result = new HashMap<>();

    ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
    if (processDefinition == null) {
        logger.info("process define not exists");
        throw new RuntimeException("process define not exists");
    }
    DAG dag = genDagGraph(processDefinition);
    /**
     * nodes that is running
     */
    Map> runningNodeMap = new ConcurrentHashMap<>();

    /**
     * nodes that is waiting to run
     */
    Map> waitingRunningNodeMap = new ConcurrentHashMap<>();

    /**
     * List of process instances
     */
    List processInstanceList = processInstanceMapper.queryByProcessDefineId(processId, limit);


    // clamp the limit to the number of instances actually found
    if (limit > processInstanceList.size()) {
        limit = processInstanceList.size();
    }

    TreeViewDto parentTreeViewDto = new TreeViewDto();
    parentTreeViewDto.setName("DAG");
    parentTreeViewDto.setType("");
    // Specify the process definition, because it is a TreeView for a process definition

    for (int i = limit - 1; i >= 0; i--) {
        ProcessInstance processInstance = processInstanceList.get(i);

        // a still-running instance has no end time; use "now" for duration display
        Date endTime = processInstance.getEndTime() == null ?
new Date() : processInstance.getEndTime();
        parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), "", processInstance.getState().toString()
                , processInstance.getStartTime(), endTime, processInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime())));
    }

    List parentTreeViewDtoList = new ArrayList<>();
    parentTreeViewDtoList.add(parentTreeViewDto);
    // Here is the encapsulation task instance
    // seed the BFS frontier with the DAG's begin nodes, all parented to the root
    for (String startNode : dag.getBeginNode()) {
        runningNodeMap.put(startNode, parentTreeViewDtoList);
    }

    // breadth-first walk of the DAG: runningNodeMap is the current frontier,
    // waitingRunningNodeMap collects the next level
    while (Stopper.isRunning()) {
        Set postNodeList = null;
        Iterator>> iter = runningNodeMap.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry> en = iter.next();
            String nodeName = en.getKey();
            parentTreeViewDtoList = en.getValue();

            TreeViewDto treeViewDto = new TreeViewDto();
            treeViewDto.setName(nodeName);
            TaskNode taskNode = dag.getNode(nodeName);
            treeViewDto.setType(taskNode.getType());


            //set treeViewDto instances: one entry per recent process instance
            for (int i = limit - 1; i >= 0; i--) {
                ProcessInstance processInstance = processInstanceList.get(i);
                TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), nodeName);
                if (taskInstance == null) {
                    // no task instance for this run - placeholder entry
                    treeViewDto.getInstances().add(new Instance(-1, "not running", "null"));
                } else {
                    // substitute "now" for missing start/end times so duration math works
                    Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
                    Date endTime = taskInstance.getEndTime() == null ?
                            new Date() : taskInstance.getEndTime();

                    int subProcessId = 0;
                    /**
                     * if process is sub process, then return sub id, else sub id = 0
                     */
                    if (taskInstance.getTaskType().equals(TaskType.SUB_PROCESS.name())) {
                        String taskJson = taskInstance.getTaskJson();
                        taskNode = JSON.parseObject(taskJson, TaskNode.class);
                        subProcessId = Integer.parseInt(JSON.parseObject(
                                taskNode.getParams()).getString(CMDPARAM_SUB_PROCESS_DEFINE_ID));
                    }
                    treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskType(), taskInstance.getState().toString()
                            , taskInstance.getStartTime(), taskInstance.getEndTime(), taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId));
                }
            }
            // attach this node under every parent that reached it
            for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) {
                pTreeViewDto.getChildren().add(treeViewDto);
            }
            // queue successors for the next level, accumulating parent lists
            postNodeList = dag.getSubsequentNodes(nodeName);
            if (postNodeList != null && postNodeList.size() > 0) {
                for (String nextNodeName : postNodeList) {
                    List treeViewDtoList = waitingRunningNodeMap.get(nextNodeName);
                    if (treeViewDtoList != null && treeViewDtoList.size() > 0) {
                        treeViewDtoList.add(treeViewDto);
                        waitingRunningNodeMap.put(nextNodeName, treeViewDtoList);
                    } else {
                        treeViewDtoList = new ArrayList<>();
                        treeViewDtoList.add(treeViewDto);
                        waitingRunningNodeMap.put(nextNodeName, treeViewDtoList);
                    }
                }
            }
            runningNodeMap.remove(nodeName);
        }

        // NOTE(review): waitingRunningNodeMap is a local ConcurrentHashMap, so the
        // "== null" half of this test can never be true - only the emptiness
        // check is meaningful.
        if (waitingRunningNodeMap == null || waitingRunningNodeMap.size() == 0) {
            break;
        } else {
            // advance the frontier to the next level
            runningNodeMap.putAll(waitingRunningNodeMap);
            waitingRunningNodeMap.clear();
        }
    }
    result.put(Constants.DATA_LIST, parentTreeViewDto);
    result.put(Constants.STATUS, Status.SUCCESS);
    result.put(Constants.MSG, Status.SUCCESS.getMsg());
    return result;
}


/**
 * Generate the DAG Graph based on the process definition id
 *
 * @param processDefinition definition whose JSON is turned into a DAG
 * @return the executable DAG
 * @throws Exception on malformed definition JSON
 */
private DAG
genDagGraph(ProcessDefinition processDefinition) throws Exception {

    String processDefinitionJson = processDefinition.getProcessDefinitionJson();

    ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);

    List taskNodeList = processData.getTasks();

    // side effect: the parsed global params are pushed back onto the definition
    processDefinition.setGlobalParamList(processData.getGlobalParams());


    List taskNodeRelations = new ArrayList<>();

    // Traverse node information and build relationships
    for (TaskNode taskNode : taskNodeList) {
        String preTasks = taskNode.getPreTasks();
        List preTasksList = JSONUtils.toList(preTasks, String.class);

        // If the dependency is not empty
        if (preTasksList != null) {
            for (String depNode : preTasksList) {
                // edge direction: dependency -> dependent task
                taskNodeRelations.add(new TaskNodeRelation(depNode, taskNode.getName()));
            }
        }
    }

    ProcessDag processDag = new ProcessDag();
    processDag.setEdges(taskNodeRelations);
    processDag.setNodes(taskNodeList);


    // Generate concrete Dag to be executed
    return genDagGraph(processDag);


}

/**
 * Generate the DAG of process
 *
 * @return DAG built from the ProcessDag's node and edge lists
 */
private DAG genDagGraph(ProcessDag processDag) {
    DAG dag = new DAG<>();

    /**
     * Add the nodes
     */
    if (CollectionUtils.isNotEmpty(processDag.getNodes())) {
        for (TaskNode node : processDag.getNodes()) {
            dag.addNode(node.getName(), node);
        }
    }

    /**
     * Add the edges
     */
    if (CollectionUtils.isNotEmpty(processDag.getEdges())) {
        for (TaskNodeRelation edge : processDag.getEdges()) {
            dag.addEdge(edge.getStartNode(), edge.getEndNode());
        }
    }

    return dag;
}


/**
 * whether the graph has a ring
 *
 * @param taskNodeResponseList task nodes whose preTasks define the edges
 * @return true when a cycle is detected
 */
private boolean graphHasCycle(List taskNodeResponseList) {
    DAG graph = new DAG<>();

    // Fill the vertices
    for (TaskNode taskNodeResponse : taskNodeResponseList) {
        graph.addNode(taskNodeResponse.getName(), taskNodeResponse);
    }

    // Fill edge relations
    for (TaskNode taskNodeResponse : taskNodeResponseList) {
        // NOTE(review): the bare getPreTasks() call below discards its result - no-op
        taskNodeResponse.getPreTasks();
        List preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(),String.class);
        if (CollectionUtils.isNotEmpty(preTasks)) {
            for (String preTask : preTasks) {
                // addEdge returns false when adding the edge would close a cycle
                if (!graph.addEdge(preTask, taskNodeResponse.getName())) {
                    return true;
                }
            }
        }
    }

    return graph.hasCycle();
}

}

diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java
deleted file mode 100644
index 23aa5b4317..0000000000
--- a/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java
+++ /dev/null
@@ -1,723 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.dto.gantt.GanttDto; -import cn.escheduler.api.dto.gantt.Task; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.DependResult; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.enums.Flag; -import cn.escheduler.common.enums.TaskType; -import cn.escheduler.common.graph.DAG; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.model.TaskNodeRelation; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.queue.ITaskQueue; -import cn.escheduler.common.queue.TaskQueueFactory; -import cn.escheduler.common.utils.*; -import cn.escheduler.common.utils.placeholder.BusinessTimeUtils; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.*; -import cn.escheduler.dao.mapper.*; -import com.alibaba.fastjson.JSON; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.io.BufferedReader; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.Charset; -import java.text.ParseException; -import java.util.*; -import java.util.stream.Collectors; - -import static cn.escheduler.common.Constants.*; - -/** - * process instance service - */ -@Service -public class ProcessInstanceService extends BaseDAGService { - - - private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceService.class); - - @Autowired - ProjectMapper projectMapper; - - @Autowired - ProjectService projectService; - - @Autowired - 
ProcessDao processDao; - - @Autowired - ProcessInstanceMapper processInstanceMapper; - - @Autowired - ProcessDefinitionMapper processDefineMapper; - - @Autowired - ProcessDefinitionService processDefinitionService; - - @Autowired - ExecutorService execService; - - @Autowired - TaskInstanceMapper taskInstanceMapper; - - @Autowired - LoggerService loggerService; - - @Autowired - WorkerGroupMapper workerGroupMapper; - - /** - * query process instance by id - * - * @param loginUser - * @param projectName - * @param processId - * @return - */ - public Map queryProcessInstanceById(User loginUser, String projectName, Integer processId) { - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processId); - String workerGroupName = ""; - if(processInstance.getWorkerGroupId() == -1){ - workerGroupName = DEFAULT; - }else{ - WorkerGroup workerGroup = workerGroupMapper.selectById(processInstance.getWorkerGroupId()); - if(workerGroup != null){ - workerGroupName = DEFAULT; - }else{ - workerGroupName = workerGroup.getName(); - } - } - processInstance.setWorkerGroupName(workerGroupName); - ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); - processInstance.setReceivers(processDefinition.getReceivers()); - processInstance.setReceiversCc(processDefinition.getReceiversCc()); - result.put(Constants.DATA_LIST, processInstance); - putMsg(result, Status.SUCCESS); - - return result; - } - - - /** - * paging query process instance list, filtering according to project, process definition, time range, keyword, process status - * - * @param loginUser - * @param projectName - * @param processDefineId - 
* @param startDate - * @param endDate - * @param searchVal - * @param stateType - * @param pageNo - * @param pageSize - * @return - */ - public Map queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId, - String startDate, String endDate, - String searchVal, ExecutionStatus stateType, String host, - Integer pageNo, Integer pageSize) { - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - int[] statusArray = null; - String statesStr = null; - // filter by state - if (stateType != null) { - statusArray = new int[]{stateType.ordinal()}; - } - if (statusArray != null) { - statesStr = Arrays.toString(statusArray).replace("[", "").replace("]", ""); - } - - Date start = null; - Date end = null; - try { - if (StringUtils.isNotEmpty(startDate)) { - start = DateUtils.getScheduleDate(startDate); - } - if (StringUtils.isNotEmpty(endDate)) { - end = DateUtils.getScheduleDate(endDate); - } - } catch (Exception e) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate"); - return result; - } - Page page = new Page(pageNo, pageSize); - - IPage processInstanceList = - processInstanceMapper.queryProcessInstanceListPaging(page, - project.getId(), processDefineId, searchVal, statusArray, host, start, end); - - Set exclusionSet = new HashSet(){{ - add(Constants.CLASS); - add("locations"); - add("connects"); - add("processInstanceJson"); - }}; - - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - pageInfo.setTotalCount((int)processInstanceList.getTotal()); - pageInfo.setLists(CollectionUtils.getListByExclusion(processInstanceList.getRecords(), exclusionSet)); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - return result; - } - - - - /** 
- * query task list by process instance id - * - * @param loginUser - * @param projectName - * @param processId - * @return - */ - public Map queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processId); - List taskInstanceList = processDao.findValidTaskListByProcessId(processId); - AddDependResultForTaskList(taskInstanceList); - Map resultMap = new HashMap<>(); - resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); - resultMap.put(TASK_LIST, taskInstanceList); - result.put(Constants.DATA_LIST, resultMap); - - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * add dependent result for dependent task - * @param taskInstanceList - */ - private void AddDependResultForTaskList(List taskInstanceList) throws IOException { - for(TaskInstance taskInstance: taskInstanceList){ - if(taskInstance.getTaskType().toUpperCase().equals(TaskType.DEPENDENT.toString())){ - Result logResult = loggerService.queryLog( - taskInstance.getId(), 0, 4098); - if(logResult.getCode() == Status.SUCCESS.ordinal()){ - String log = (String) logResult.getData(); - Map resultMap = parseLogForDependentResult(log); - taskInstance.setDependentResult(JSONUtils.toJson(resultMap)); - } - } - } - } - - public Map parseLogForDependentResult(String log) throws IOException { - Map resultMap = new HashMap<>(); - if(StringUtils.isEmpty(log)){ - return resultMap; - } - - BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(Charset.forName("utf8"))), Charset.forName("utf8"))); - String line; - while 
((line = br.readLine()) != null) { - if(line.contains(DEPENDENT_SPLIT)){ - String[] tmpStringArray = line.split(":\\|\\|"); - if(tmpStringArray.length != 2){ - continue; - } - String dependResultString = tmpStringArray[1]; - String[] dependStringArray = dependResultString.split(","); - if(dependStringArray.length != 2){ - continue; - } - String key = dependStringArray[0].trim(); - DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim()); - resultMap.put(key, dependResult); - } - } - return resultMap; - } - - - /** - * query sub process instance detail info by task id - * - * @param loginUser - * @param projectName - * @param taskId - * @return - */ - public Map querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId) { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - TaskInstance taskInstance = processDao.findTaskInstanceById(taskId); - if (taskInstance == null) { - putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); - return result; - } - if (!taskInstance.isSubProcess()) { - putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName()); - return result; - } - - ProcessInstance subWorkflowInstance = processDao.findSubProcessInstance( - taskInstance.getProcessInstanceId(), taskInstance.getId()); - if (subWorkflowInstance == null) { - putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId); - return result; - } - Map dataMap = new HashMap<>(); - dataMap.put("subProcessInstanceId", subWorkflowInstance.getId()); - result.put(Constants.DATA_LIST, dataMap); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * update process instance - * - * @param loginUser - * @param projectName - * @param processInstanceId - 
* @param processInstanceJson - * @param scheduleTime - * @param syncDefine - * @param flag - * @param locations - * @param connects - * @return - */ - public Map updateProcessInstance(User loginUser, String projectName, Integer processInstanceId, - String processInstanceJson, String scheduleTime, Boolean syncDefine, - Flag flag, String locations, String connects) throws ParseException { - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - //check project permission - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - //check process instance exists - ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); - if (processInstance == null) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); - return result; - } - - //check process instance status - if (!processInstance.getState().typeIsFinished()) { - putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, - processInstance.getName(), processInstance.getState().toString(), "update"); - return result; - } - Date schedule = null; - if (scheduleTime != null) { - schedule = DateUtils.getScheduleDate(scheduleTime); - } else { - schedule = processInstance.getScheduleTime(); - } - processInstance.setScheduleTime(schedule); - processInstance.setLocations(locations); - processInstance.setConnects(connects); - String globalParams = null; - String originDefParams = null; - int timeout = processInstance.getTimeout(); - ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); - if (StringUtils.isNotEmpty(processInstanceJson)) { - ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class); - //check workflow json is valid - Map checkFlowJson = 
processDefinitionService.checkProcessNodeList(processData, processInstanceJson); - if (checkFlowJson.get(Constants.STATUS) != Status.SUCCESS) { - return result; - } - - originDefParams = JSONUtils.toJson(processData.getGlobalParams()); - List globalParamList = processData.getGlobalParams(); - Map globalParamMap = globalParamList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); - globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, - processInstance.getCmdTypeIfComplement(), schedule); - timeout = processData.getTimeout(); - processInstance.setTimeout(timeout); - Tenant tenant = processDao.getTenantForProcess(processData.getTenantId(), - processDefinition.getUserId()); - if(tenant != null){ - processInstance.setTenantCode(tenant.getTenantCode()); - } - processInstance.setProcessInstanceJson(processInstanceJson); - processInstance.setGlobalParams(globalParams); - } -// int update = processDao.updateProcessInstance(processInstanceId, processInstanceJson, -// globalParams, schedule, flag, locations, connects); - int update = processDao.updateProcessInstance(processInstance); - int updateDefine = 1; - if (syncDefine && StringUtils.isNotEmpty(processInstanceJson)) { - processDefinition.setProcessDefinitionJson(processInstanceJson); - processDefinition.setGlobalParams(originDefParams); - processDefinition.setLocations(locations); - processDefinition.setConnects(connects); - processDefinition.setTimeout(timeout); - updateDefine = processDefineMapper.updateById(processDefinition); - } - if (update > 0 && updateDefine > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR); - } - - - return result; - - } - - /** - * query parent process instance detail info by sub process instance id - * - * @param loginUser - * @param projectName - * @param subId - * @return - */ - public Map queryParentInstanceBySubId(User loginUser, String projectName, Integer subId) { - Map result = 
new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - ProcessInstance subInstance = processDao.findProcessInstanceDetailById(subId); - if (subInstance == null) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId); - return result; - } - if (subInstance.getIsSubProcess() == Flag.NO) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName()); - return result; - } - - ProcessInstance parentWorkflowInstance = processDao.findParentProcessInstance(subId); - if (parentWorkflowInstance == null) { - putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST); - return result; - } - Map dataMap = new HashMap<>(); - dataMap.put("parentWorkflowInstance", parentWorkflowInstance.getId()); - result.put(Constants.DATA_LIST, dataMap); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * delete process instance by id, at the same time,delete task instance and their mapping relation data - * @param loginUser - * @param projectName - * @param processInstanceId - * @param tasksQueue - * @return - */ - public Map deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId,ITaskQueue tasksQueue) { - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); - List taskInstanceList = processDao.findValidTaskListByProcessId(processInstanceId); - //process instance priority - int processInstancePriority = 
processInstance.getProcessInstancePriority().ordinal(); - if (processInstance == null) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); - return result; - } - - // delete zk queue - if (CollectionUtils.isNotEmpty(taskInstanceList)){ - for (TaskInstance taskInstance : taskInstanceList){ - // task instance priority - int taskInstancePriority = taskInstance.getTaskInstancePriority().ordinal(); - - StringBuilder nodeValueSb = new StringBuilder(100); - nodeValueSb.append(processInstancePriority) - .append(UNDERLINE) - .append(processInstanceId) - .append(UNDERLINE) - .append(taskInstancePriority) - .append(UNDERLINE) - .append(taskInstance.getId()) - .append(UNDERLINE); - - int taskWorkerGroupId = processDao.getTaskWorkerGroupId(taskInstance); - WorkerGroup workerGroup = workerGroupMapper.selectById(taskWorkerGroupId); - - if(workerGroup == null){ - nodeValueSb.append(DEFAULT_WORKER_ID); - }else { - - String ips = workerGroup.getIpList(); - StringBuilder ipSb = new StringBuilder(100); - String[] ipArray = ips.split(COMMA); - - for (String ip : ipArray) { - long ipLong = IpUtils.ipToLong(ip); - ipSb.append(ipLong).append(COMMA); - } - - if(ipSb.length() > 0) { - ipSb.deleteCharAt(ipSb.length() - 1); - } - nodeValueSb.append(ipSb); - } - - try { - logger.info("delete task queue node : {}",nodeValueSb.toString()); - tasksQueue.removeNode(cn.escheduler.common.Constants.SCHEDULER_TASKS_QUEUE, nodeValueSb.toString()); - }catch (Exception e){ - logger.error("delete task queue node : {}", nodeValueSb.toString()); - } - } - } - - // delete database cascade - int delete = processDao.deleteWorkProcessInstanceById(processInstanceId); - processDao.deleteAllSubWorkProcessByParentId(processInstanceId); - processDao.deleteWorkProcessMapByParentId(processInstanceId); - - if (delete > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR); - } - - return result; - } - - /** - * batch delete process instance 
by ids, at the same time,delete task instance and their mapping relation data - * - * @param loginUser - * @param projectName - * @param processInstanceIds - * @return - */ - public Map batchDeleteProcessInstanceByIds(User loginUser, String projectName, String processInstanceIds) { - // task queue - ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance(); - - Map result = new HashMap<>(5); - List deleteFailedIdList = new ArrayList(); - - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - if(StringUtils.isNotEmpty(processInstanceIds)){ - String[] processInstanceIdArray = processInstanceIds.split(","); - - for (String strProcessInstanceId:processInstanceIdArray) { - int processInstanceId = Integer.parseInt(strProcessInstanceId); - try { - deleteProcessInstanceById(loginUser, projectName, processInstanceId,tasksQueue); - } catch (Exception e) { - deleteFailedIdList.add(processInstanceId); - } - } - } - if(deleteFailedIdList.size() > 0){ - putMsg(result, Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR,StringUtils.join(deleteFailedIdList.toArray(),",")); - }else{ - putMsg(result, Status.SUCCESS); - } - - return result; - } - - /** - * view process instance variables - * - * @param processInstanceId - * @return - */ - public Map viewVariables( Integer processInstanceId) throws Exception { - Map result = new HashMap<>(5); - - ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); - - if (processInstance == null) { - throw new RuntimeException("workflow instance is null"); - } - - Map timeParams = BusinessTimeUtils - .getBusinessTime(processInstance.getCmdTypeIfComplement(), - processInstance.getScheduleTime()); - - - String workflowInstanceJson = processInstance.getProcessInstanceJson(); - - 
ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class); - - String userDefinedParams = processInstance.getGlobalParams(); - - // global params - List globalParams = new ArrayList<>(); - - if (userDefinedParams != null && userDefinedParams.length() > 0) { - globalParams = JSON.parseArray(userDefinedParams, Property.class); - } - - - List taskNodeList = workflowData.getTasks(); - - // global param string - String globalParamStr = JSON.toJSONString(globalParams); - globalParamStr = ParameterUtils.convertParameterPlaceholders(globalParamStr, timeParams); - globalParams = JSON.parseArray(globalParamStr, Property.class); - for (Property property : globalParams) { - timeParams.put(property.getProp(), property.getValue()); - } - - // local params - Map> localUserDefParams = new HashMap<>(); - for (TaskNode taskNode : taskNodeList) { - String parameter = taskNode.getParams(); - Map map = JSONUtils.toMap(parameter); - String localParams = map.get(LOCAL_PARAMS); - if (localParams != null && !localParams.isEmpty()) { - localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams); - List localParamsList = JSON.parseArray(localParams, Property.class); - Map localParamsMap = new HashMap<>(); - localParamsMap.put("taskType",taskNode.getType()); - localParamsMap.put("localParamsList",localParamsList); - if (localParamsList.size() > 0) { - localUserDefParams.put(taskNode.getName(), localParamsMap); - } - } - - } - - Map resultMap = new HashMap<>(); - - resultMap.put(GLOBAL_PARAMS, globalParams); - resultMap.put(LOCAL_PARAMS, localUserDefParams); - - result.put(Constants.DATA_LIST, resultMap); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * encapsulation gantt structure - * - * @param processInstanceId - * @return - * @throws Exception - */ - public Map viewGantt(Integer processInstanceId) throws Exception { - Map result = new HashMap<>(); - - ProcessInstance processInstance = 
processInstanceMapper.queryDetailById(processInstanceId); - - if (processInstance == null) { - throw new RuntimeException("workflow instance is null"); - } - - GanttDto ganttDto = new GanttDto(); - - DAG dag = processInstance2DAG(processInstance); - //topological sort - List nodeList = dag.topologicalSort(); - - ganttDto.setTaskNames(nodeList); - - List taskList = new ArrayList<>(); - for (String node : nodeList) { - TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstanceId, node); - if (taskInstance == null) { - continue; - } - Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime(); - Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime(); - Task task = new Task(); - task.setTaskName(taskInstance.getName()); - task.getStartDate().add(startTime.getTime()); - task.getEndDate().add(endTime.getTime()); - task.setIsoStart(startTime); - task.setIsoEnd(endTime); - task.setStatus(taskInstance.getState().toString()); - task.setExecutionDate(taskInstance.getStartTime()); - task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime())); - taskList.add(task); - } - ganttDto.setTasks(taskList); - - result.put(Constants.DATA_LIST, ganttDto); - putMsg(result, Status.SUCCESS); - return result; - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/ProjectService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/ProjectService.java deleted file mode 100644 index 4096e96f73..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/ProjectService.java +++ /dev/null @@ -1,396 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.entity.Project; -import cn.escheduler.dao.entity.ProjectUser; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.mapper.ProcessDefinitionMapper; -import cn.escheduler.dao.mapper.ProjectMapper; -import cn.escheduler.dao.mapper.ProjectUserMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.util.*; - -import static cn.escheduler.api.utils.CheckUtils.checkDesc; - -/** - * project service - *HttpTask./ -**/ -@Service -public class ProjectService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(ProjectService.class); - - @Autowired - private ProjectMapper projectMapper; - - @Autowired - private ProjectUserMapper projectUserMapper; - - @Autowired - private ProcessDefinitionMapper processDefinitionMapper; - - /** - * create project - * - * @param loginUser - * @param name - * @param desc - * @return - 
*/ - public Map createProject(User loginUser, String name, String desc) { - - Map result = new HashMap<>(5); - Map descCheck = checkDesc(desc); - if (descCheck.get(Constants.STATUS) != Status.SUCCESS) { - return descCheck; - } - - Project project = projectMapper.queryByName(name); - if (project != null) { - putMsg(result, Status.PROJECT_ALREADY_EXISTS, name); - return result; - } - project = new Project(); - Date now = new Date(); - - project.setName(name); - project.setDesc(desc); - project.setUserId(loginUser.getId()); - project.setUserName(loginUser.getUserName()); - project.setCreateTime(now); - project.setUpdateTime(now); - - if (projectMapper.insert(project) > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.CREATE_PROJECT_ERROR); - } - return result; - } - - /** - * query project details by id - * - * @param projectId - * @return - */ - public Map queryById(Integer projectId) { - - Map result = new HashMap<>(5); - Project project = projectMapper.selectById(projectId); - - if (project != null) { - result.put(Constants.DATA_LIST, project); - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.PROJECT_NOT_FOUNT, projectId); - } - return result; - } - - /** - * check project and authorization - * 检查项目权限 - * - * @param loginUser - * @param project - * @param projectName - * @return - */ - public Map checkProjectAndAuth(User loginUser, Project project, String projectName) { - - Map result = new HashMap<>(5); - - if (project == null) { - putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); - } else if (!checkReadPermission(loginUser, project)) { - // check read permission - putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName); - }else { - putMsg(result, Status.SUCCESS); - } - - - return result; - } - - /** - * admin can view all projects - * 如果是管理员,则所有项目都可见 - * - * @param loginUser - * @param pageSize - * @param pageNo - * @param searchVal - * @return - */ - public Map 
queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) { - Map result = new HashMap<>(); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - - Page page = new Page(pageNo, pageSize); - - int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId(); - IPage projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal); - - List projectList = projectIPage.getRecords(); - if(userId != 0){ - for (Project project : projectList) { - project.setPerm(cn.escheduler.common.Constants.DEFAULT_ADMIN_PERMISSION); - } - } - pageInfo.setTotalCount((int)projectIPage.getTotal()); - pageInfo.setLists(projectList); - result.put(Constants.COUNT, (int)projectIPage.getTotal()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * delete project by id - * - * @param loginUser - * @param projectId - * @return - */ - public Map deleteProject(User loginUser, Integer projectId) { - Map result = new HashMap<>(5); - Project project = projectMapper.selectById(projectId); - Map checkResult = getCheckResult(loginUser, project); - if (checkResult != null) { - return checkResult; - } - List processDefinitionList = processDefinitionMapper.queryAllDefinitionList(projectId); - - if(processDefinitionList.size() > 0){ - putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL); - return result; - } - - int delete = projectMapper.deleteById(projectId); - if (delete > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.DELETE_PROJECT_ERROR); - } - return result; - } - - /** - * get check result - * - * @param loginUser - * @param project - * @return - */ - private Map getCheckResult(User loginUser, Project project) { - Map checkResult = checkProjectAndAuth(loginUser, project, project.getName()); - Status status = (Status) checkResult.get(Constants.STATUS); - if (status != Status.SUCCESS) { - return checkResult; - } - return null; - } - - /** - * 
updateProcessInstance project - * - * @param loginUser - * @param projectId - * @param projectName - * @param desc - * @return - */ - public Map update(User loginUser, Integer projectId, String projectName, String desc) { - Map result = new HashMap<>(5); - - Project project = projectMapper.selectById(projectId); - Map checkResult = getCheckResult(loginUser, project); - if (checkResult != null) { - return checkResult; - } - Project tempProject = projectMapper.queryByName(projectName); - if (tempProject != null && tempProject.getId() != projectId) { - putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName); - return result; - } - project.setName(projectName); - project.setDesc(desc); - project.setUpdateTime(new Date()); - - int update = projectMapper.updateById(project); - if (update > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.UPDATE_PROJECT_ERROR); - } - return result; - } - - - /** - * query unauthorized project - * - * @param loginUser - * @param userId - * @return - */ - public Map queryUnauthorizedProject(User loginUser, Integer userId) { - Map result = new HashMap<>(5); - if (checkAdmin(loginUser, result)) { - return result; - } - /** - * query all project list except specified userId - */ - List projectList = projectMapper.queryProjectExceptUserId(userId); - List resultList = new ArrayList<>(); - Set projectSet = null; - if (projectList != null && projectList.size() > 0) { - projectSet = new HashSet<>(projectList); - - List authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId); - - resultList = getUnauthorizedProjects(projectSet, authedProjectList); - } - result.put(Constants.DATA_LIST, resultList); - putMsg(result,Status.SUCCESS); - return result; - } - - /** - * get unauthorized project - * - * @param projectSet - * @param authedProjectList - * @return - */ - private List getUnauthorizedProjects(Set projectSet, List authedProjectList) { - List resultList; - Set authedProjectSet = null; - if 
(authedProjectList != null && authedProjectList.size() > 0) { - authedProjectSet = new HashSet<>(authedProjectList); - projectSet.removeAll(authedProjectSet); - - } - resultList = new ArrayList<>(projectSet); - return resultList; - } - - - /** - * query authorized project - * - * @param loginUser - * @param userId - * @return - */ - public Map queryAuthorizedProject(User loginUser, Integer userId) { - Map result = new HashMap<>(); - - if (checkAdmin(loginUser, result)) { - return result; - } - - List projects = projectMapper.queryAuthedProjectListByUserId(userId); - result.put(Constants.DATA_LIST, projects); - putMsg(result,Status.SUCCESS); - - return result; - } - - - /** - * check whether have read permission - * - * @param user - * @param project - * @return - */ - private boolean checkReadPermission(User user, Project project) { - int permissionId = queryPermission(user, project); - return (permissionId & cn.escheduler.common.Constants.READ_PERMISSION) != 0; - } - - /** - * query permission id - * - * @param user - * @param project - * @return - */ - private int queryPermission(User user, Project project) { - if (user.getUserType() == UserType.ADMIN_USER) { - return cn.escheduler.common.Constants.READ_PERMISSION; - } - - if (project.getUserId() == user.getId()) { - return cn.escheduler.common.Constants.ALL_PERMISSIONS; - } - - ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId()); - - if (projectUser == null) { - return 0; - } - - return projectUser.getPerm(); - - } - - /** - * query all project list that have one or more process definitions. 
- * @return - */ - public Map queryAllProjectList() { - Map result = new HashMap<>(); - List projects = projectMapper.selectList(null); - List processDefinitions = processDefinitionMapper.selectList(null); - if(projects != null){ - Set set = new HashSet<>(); - for (ProcessDefinition processDefinition : processDefinitions){ - set.add(processDefinition.getProjectId()); - } - List tempDeletelist = new ArrayList(); - for (Project project : projects) { - if(!set.contains(project.getId())){ - tempDeletelist.add(project); - } - } - projects.removeAll(tempDeletelist); - } - result.put(Constants.DATA_LIST, projects); - putMsg(result,Status.SUCCESS); - return result; - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/QueueService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/QueueService.java deleted file mode 100644 index 8cf32eb699..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/QueueService.java +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.api.utils.Result; -import cn.escheduler.dao.entity.Queue; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.mapper.QueueMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * queue service - */ -@Service -public class QueueService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(TenantService.class); - - @Autowired - private QueueMapper queueMapper; - - /** - * query queue list - * - * @param loginUser - * @return - */ - public Map queryList(User loginUser) { - Map result = new HashMap<>(5); - if (checkAdmin(loginUser, result)) { - return result; - } - - List queueList = queueMapper.selectList(null); - result.put(Constants.DATA_LIST, queueList); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * query queue list paging - * - * @param loginUser - * @param searchVal - * @param pageNo - * @param pageSize - * @return - */ - public Map queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); - if (checkAdmin(loginUser, result)) { - return result; - } - - Page page = new Page(pageNo, pageSize); - - - IPage queueList = queueMapper.queryQueuePaging(page, searchVal); - - Integer count = (int)queueList.getTotal(); - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotalCount(count); - pageInfo.setLists(queueList.getRecords()); - 
result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * create queue - * - * @param loginUser - * @param queue - * @param queueName - * @return - */ - public Map createQueue(User loginUser, String queue, String queueName) { - Map result = new HashMap<>(5); - if (checkAdmin(loginUser, result)) { - return result; - } - - if(StringUtils.isEmpty(queue)){ - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queue); - return result; - } - - if(StringUtils.isEmpty(queueName)){ - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queueName); - return result; - } - - if (checkQueueNameExist(queueName)) { - putMsg(result, Status.QUEUE_NAME_EXIST, queueName); - return result; - } - - if (checkQueueExist(queue)) { - putMsg(result, Status.QUEUE_VALUE_EXIST, queue); - return result; - } - - Queue queueObj = new Queue(); - Date now = new Date(); - - queueObj.setQueue(queue); - queueObj.setQueueName(queueName); - queueObj.setCreateTime(now); - queueObj.setUpdateTime(now); - - queueMapper.insert(queueObj); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * update queue - * - * @param loginUser - * @param id - * @param queue - * @param queueName - * @return - */ - public Map updateQueue(User loginUser, int id, String queue, String queueName) { - Map result = new HashMap<>(5); - if (checkAdmin(loginUser, result)) { - return result; - } - - Queue queueObj = queueMapper.selectById(id); - if (queueObj == null) { - putMsg(result, Status.QUEUE_NOT_EXIST, id); - return result; - } - - // whether queue value or queueName is changed - if (queue.equals(queueObj.getQueue()) && queueName.equals(queueObj.getQueueName())) { - putMsg(result, Status.NEED_NOT_UPDATE_QUEUE); - return result; - } - - // check queue name is exist - if (!queueName.equals(queueObj.getQueueName())) { - if(checkQueueNameExist(queueName)){ - putMsg(result, Status.QUEUE_NAME_EXIST, queueName); - return result; - } - } - - // check queue value is 
exist - if (!queue.equals(queueObj.getQueue())) { - if(checkQueueExist(queue)){ - putMsg(result, Status.QUEUE_VALUE_EXIST, queue); - return result; - } - } - - // update queue - Date now = new Date(); - queueObj.setQueue(queue); - queueObj.setQueueName(queueName); - queueObj.setUpdateTime(now); - - queueMapper.updateById(queueObj); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * verify queue and queueName - * - * @param queue - * @param queueName - * @return - */ - public Result verifyQueue(String queue, String queueName) { - Result result=new Result(); - - if (StringUtils.isEmpty(queue)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queue); - return result; - } - - if (StringUtils.isEmpty(queueName)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, queueName); - return result; - } - - - if(checkQueueNameExist(queueName)){ - logger.error("queue name {} has exist, can't create again.", queueName); - putMsg(result, Status.QUEUE_NAME_EXIST, queueName); - return result; - } - - if(checkQueueExist(queue)){ - logger.error("queue value {} has exist, can't create again.", queue); - putMsg(result, Status.QUEUE_VALUE_EXIST, queue); - return result; - } - - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * check queue exist - * - * @param queue - * @return - */ - private boolean checkQueueExist(String queue) { - return queueMapper.queryAllQueueList(queue, null).size()>0 ? false : true; - } - - /** - * check queue name exist - * - * @param queueName - * @return - */ - private boolean checkQueueNameExist(String queueName) { - return queueMapper.queryAllQueueList(null ,queueName).size()>0 ? 
false : true; - } - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/ResourcesService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/ResourcesService.java deleted file mode 100644 index d2e47c8826..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/ResourcesService.java +++ /dev/null @@ -1,898 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.ResourceType; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.common.utils.FileUtils; -import cn.escheduler.common.utils.HadoopUtils; -import cn.escheduler.common.utils.PropertyUtils; -import cn.escheduler.dao.mapper.*; -import cn.escheduler.dao.entity.Resource; -import cn.escheduler.dao.entity.Tenant; -import cn.escheduler.dao.entity.UdfFunc; -import cn.escheduler.dao.entity.User; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.collections.BeanMap; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.web.multipart.MultipartFile; - -import java.text.MessageFormat; -import java.util.*; - -import static cn.escheduler.api.enums.Status.UPDATE_RESOURCE_ERROR; -import static cn.escheduler.common.Constants.*; - -/** - * resources service - */ -@Service -public class ResourcesService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(ResourcesService.class); - - @Autowired - private ResourceMapper resourcesMapper; - - @Autowired - private UdfFuncMapper udfFunctionMapper; - - @Autowired - private TenantMapper tenantMapper; - - @Autowired - private UserMapper userMapper; - - @Autowired - private ResourceUserMapper resourceUserMapper; - - /** - * create resource - * - * @param loginUser - * @param type - * @param name - * @param desc - * @param file - * @return - */ - @Transactional(value = "TransactionManager",rollbackFor = 
Exception.class) - public Result createResource(User loginUser, - String name, - String desc, - ResourceType type, - MultipartFile file) { - Result result = new Result(); - - // if hdfs not startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - // file is empty - if (file.isEmpty()) { - logger.error("file is empty: {}", file.getOriginalFilename()); - putMsg(result, Status.RESOURCE_FILE_IS_EMPTY); - return result; - } - - // file suffix - String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); - String nameSuffix = FileUtils.suffix(name); - - // determine file suffix - if (!StringUtils.equals(fileSuffix, nameSuffix)) { - /** - * rename file suffix and original suffix must be consistent - * 重命名的后缀必须与原文件后缀一致 - */ - logger.error("rename file suffix and original suffix must be consistent: {}", file.getOriginalFilename()); - putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE); - return result; - } - // - //If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar - if (Constants.UDF.equals(type.name())) { - if (!JAR.equalsIgnoreCase(fileSuffix)) { - logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg()); - putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR); - return result; - } - } - if (file.getSize() > Constants.maxFileSize) { - logger.error("file size is too large: {}", file.getOriginalFilename()); - putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT); - return result; - } - - // check resoure name exists - if (checkResourceExists(name, 0, type.ordinal())) { - logger.error("resource {} has exist, can't recreate", name); - putMsg(result, Status.RESOURCE_EXIST); - return result; - } - - Date now = new Date(); - - Resource resource = new Resource(name,file.getOriginalFilename(),desc,loginUser.getId(),type,file.getSize(),now,now); - - try { - 
resourcesMapper.insert(resource); - - putMsg(result, Status.SUCCESS); - Map dataMap = new BeanMap(resource); - Map resultMap = new HashMap(); - for (Object key : dataMap.keySet()) { - if (!"class".equalsIgnoreCase(key.toString())) { - resultMap.put(key.toString(), dataMap.get(key)); - } - } - result.setData(resultMap); - } catch (Exception e) { - logger.error("resource already exists, can't recreate ", e); - putMsg(result, Status.CREATE_RESOURCE_ERROR); - return result; - } - - // fail upload - if (!upload(loginUser, name, file, type)) { - logger.error("upload resource: {} file: {} failed.", name, file.getOriginalFilename()); - putMsg(result, Status.HDFS_OPERATION_ERROR); - throw new RuntimeException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); - } - return result; - } - - private boolean checkResourceExists(String alias, int userId, int type ){ - - List resources = resourcesMapper.queryResourceList(alias, userId, type); - if (resources != null && resources.size() > 0) { - return true; - } - return false; - } - - - - /** - * update resource - * - * @param loginUser - * @param type - * @param name - * @param desc - * @return - */ - @Transactional(value = "TransactionManager",rollbackFor = Exception.class) - public Result updateResource(User loginUser, - int resourceId, - String name, - String desc, - ResourceType type) { - Result result = new Result(); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - Resource resource = resourcesMapper.selectById(resourceId); - String originResourceName = resource.getAlias(); - if (resource == null) { - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - if (loginUser.getId() != resource.getUserId()) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - - if 
(name.equals(resource.getAlias()) && desc.equals(resource.getDesc())) { - putMsg(result, Status.SUCCESS); - return result; - } - - //check resource aleady exists - if (!resource.getAlias().equals(name)) { - if (checkResourceExists(name, 0, type.ordinal())) { - logger.error("resource {} already exists, can't recreate", name); - putMsg(result, Status.RESOURCE_EXIST); - return result; - } - } - - // updateProcessInstance data - Date now = new Date(); - resource.setAlias(name); - resource.setDesc(desc); - resource.setUpdateTime(now); - - try { - resourcesMapper.updateById(resource); - - putMsg(result, Status.SUCCESS); - Map dataMap = new BeanMap(resource); - Map resultMap = new HashMap<>(5); - for (Object key : dataMap.keySet()) { - if (!Constants.CLASS.equalsIgnoreCase(key.toString())) { - resultMap.put(key.toString(), dataMap.get(key)); - } - } - result.setData(resultMap); - } catch (Exception e) { - logger.error(UPDATE_RESOURCE_ERROR.getMsg(), e); - putMsg(result, Status.UPDATE_RESOURCE_ERROR); - return result; - } - // if name unchanged, return directly without moving on HDFS - if (originResourceName.equals(name)) { - return result; - } - - // hdfs move - // query tenant by user id - User user = userMapper.queryDetailsById(resource.getUserId()); - String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); - // get file hdfs path - // delete hdfs file by type - String originHdfsFileName = ""; - String destHdfsFileName = ""; - if (resource.getType().equals(ResourceType.FILE)) { - originHdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, originResourceName); - destHdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, name); - } else if (resource.getType().equals(ResourceType.UDF)) { - originHdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, originResourceName); - destHdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, name); - } - try { - if (HadoopUtils.getInstance().exists(originHdfsFileName)) { - logger.info("hdfs copy {} -> 
{}", originHdfsFileName, destHdfsFileName); - HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true); - } else { - logger.error("{} not exist", originHdfsFileName); - putMsg(result,Status.RESOURCE_NOT_EXIST); - } - } catch (Exception e) { - logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e); - putMsg(result,Status.HDFS_COPY_FAIL); - } - - return result; - - } - - /** - * query resources list paging - * - * @param loginUser - * @param type - * @param searchVal - * @param pageNo - * @param pageSize - * @return - */ - public Map queryResourceListPaging(User loginUser, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) { - - HashMap result = new HashMap<>(5); - Page page = new Page(pageNo, pageSize); - int userId = loginUser.getId(); - if (isAdmin(loginUser)) { - userId= 0; - } - IPage resourceIPage = resourcesMapper.queryResourcePaging(page, - userId, type.ordinal(), searchVal); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - pageInfo.setTotalCount((int)resourceIPage.getTotal()); - pageInfo.setLists(resourceIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result,Status.SUCCESS); - return result; - } - - /** - * upload file to hdfs - * - * @param loginUser - * @param name - * @param file - */ - private boolean upload(User loginUser, String name, MultipartFile file, ResourceType type) { - // save to local - String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); - String nameSuffix = FileUtils.suffix(name); - - // determine file suffix - if (!StringUtils.equals(fileSuffix, nameSuffix)) { - return false; - } - // query tenant - String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); - // random file name - String localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); - - - // save file to hdfs, and delete original file - String hdfsFilename = ""; - String resourcePath = 
""; - if (type.equals(ResourceType.FILE)) { - hdfsFilename = HadoopUtils.getHdfsFilename(tenantCode, name); - resourcePath = HadoopUtils.getHdfsResDir(tenantCode); - } else if (type.equals(ResourceType.UDF)) { - hdfsFilename = HadoopUtils.getHdfsUdfFilename(tenantCode, name); - resourcePath = HadoopUtils.getHdfsUdfDir(tenantCode); - } - try { - // if tenant dir not exists - if (!HadoopUtils.getInstance().exists(resourcePath)) { - createTenantDirIfNotExists(tenantCode); - } - cn.escheduler.api.utils.FileUtils.copyFile(file, localFilename); - HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true); - } catch (Exception e) { - logger.error(e.getMessage(), e); - return false; - } - return true; - } - - /** - * query resource list - * - * @param loginUser - * @param type - * @return - */ - public Map queryResourceList(User loginUser, ResourceType type) { - - Map result = new HashMap<>(5); - List resourceList; - int userId = loginUser.getId(); - if(isAdmin(loginUser)){ - userId = 0; - } - resourceList = resourcesMapper.queryResourceList(null, userId, type.ordinal()); - result.put(Constants.DATA_LIST, resourceList); - putMsg(result,Status.SUCCESS); - - return result; - } - - /** - * delete resource - * - * @param loginUser - * @param resourceId - */ - @Transactional(value = "TransactionManager",rollbackFor = Exception.class) - public Result delete(User loginUser, int resourceId) throws Exception { - Result result = new Result(); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - //get resource and hdfs path - Resource resource = resourcesMapper.selectById(resourceId); - if (resource == null) { - logger.error("resource file not exist, resource id {}", resourceId); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - if (loginUser.getId() 
!= resource.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); - String hdfsFilename = ""; - - // delete hdfs file by type - hdfsFilename = getHdfsFileName(resource, tenantCode, hdfsFilename); - - //delete data in database - resourcesMapper.deleteById(resourceId); - resourceUserMapper.deleteResourceUser(0, resourceId); - //delete file on hdfs - HadoopUtils.getInstance().delete(hdfsFilename, false); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * verify resource by name and type - * @param name - * @param type - * @param loginUser - * @return - */ - public Result verifyResourceName(String name, ResourceType type,User loginUser) { - Result result = new Result(); - putMsg(result, Status.SUCCESS); - if (checkResourceExists(name, 0, type.ordinal())) { - logger.error("resource type:{} name:{} has exist, can't create again.", type, name); - putMsg(result, Status.RESOURCE_EXIST); - } else { - // query tenant - Tenant tenant = tenantMapper.queryById(loginUser.getTenantId()); - if(tenant != null){ - String tenantCode = tenant.getTenantCode(); - - try { - String hdfsFilename = getHdfsFileName(type,tenantCode,name); - if(HadoopUtils.getInstance().exists(hdfsFilename)){ - logger.error("resource type:{} name:{} has exist in hdfs {}, can't create again.", type, name,hdfsFilename); - putMsg(result, Status.RESOURCE_FILE_EXIST,hdfsFilename); - } - - } catch (Exception e) { - logger.error(e.getMessage(),e); - putMsg(result,Status.HDFS_OPERATION_ERROR); - } - }else{ - putMsg(result,Status.TENANT_NOT_EXIST); - } - } - - - return result; - } - - /** - * verify resource by name and type - * - * @param name - * @return - */ - public Result verifyResourceName(String name, ResourceType type) { - Result result = new Result(); - if (checkResourceExists(name, 0, type.ordinal())) { - 
logger.error("resource type:{} name:{} has exist, can't create again.", type, name); - putMsg(result, Status.RESOURCE_EXIST); - } else { - putMsg(result, Status.SUCCESS); - } - - return result; - } - - /** - * view resource file online - * - * @param resourceId - * @return - */ - public Result readResource(int resourceId, int skipLineNum, int limit) { - Result result = new Result(); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - // get resource by id - Resource resource = resourcesMapper.selectById(resourceId); - if (resource == null) { - logger.error("resouce file not exist, resource id {}", resourceId); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - //check preview or not by file suffix - String nameSuffix = FileUtils.suffix(resource.getAlias()); - String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); - if (StringUtils.isNotEmpty(resourceViewSuffixs)) { - List strList = Arrays.asList(resourceViewSuffixs.split(",")); - if (!strList.contains(nameSuffix)) { - logger.error("resouce suffix {} not support view, resource id {}", nameSuffix, resourceId); - putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); - return result; - } - } - - User user = userMapper.queryDetailsById(resource.getUserId()); - String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); - // hdfs path - String hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias()); - logger.info("resource hdfs path is {} ", hdfsFileName); - try { - if(HadoopUtils.getInstance().exists(hdfsFileName)){ - List content = HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit); - - putMsg(result, Status.SUCCESS); - Map map = new HashMap<>(); - map.put(ALIAS, resource.getAlias()); - map.put(CONTENT, StringUtils.join(content.toArray(), 
"\n")); - result.setData(map); - }else{ - logger.error("read file {} not exist in hdfs", hdfsFileName); - putMsg(result, Status.RESOURCE_FILE_NOT_EXIST,hdfsFileName); - } - - } catch (Exception e) { - logger.error(String.format("Resource %s read failed", hdfsFileName), e); - putMsg(result, Status.HDFS_OPERATION_ERROR); - } - - return result; - } - - /** - * create resource file online - * - * @param loginUser - * @param type - * @param fileName - * @param fileSuffix - * @param desc - * @param content - * @return - */ - @Transactional(value = "TransactionManager",rollbackFor = Exception.class) - public Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content) { - Result result = new Result(); - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - //check file suffix - String nameSuffix = fileSuffix.trim(); - String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); - if (StringUtils.isNotEmpty(resourceViewSuffixs)) { - List strList = Arrays.asList(resourceViewSuffixs.split(",")); - if (!strList.contains(nameSuffix)) { - logger.error("resouce suffix {} not support create", nameSuffix); - putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); - return result; - } - } - - String name = fileName.trim() + "." 
+ nameSuffix; - - result = verifyResourceName(name,type,loginUser); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - return result; - } - - // save data - Date now = new Date(); - Resource resource = new Resource(name,name,desc,loginUser.getId(),type,content.getBytes().length,now,now); - - resourcesMapper.insert(resource); - - putMsg(result, Status.SUCCESS); - Map dataMap = new BeanMap(resource); - Map resultMap = new HashMap<>(5); - for (Object key : dataMap.keySet()) { - if (!Constants.CLASS.equalsIgnoreCase(key.toString())) { - resultMap.put(key.toString(), dataMap.get(key)); - } - } - result.setData(resultMap); - - String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); - - result = uploadContentToHdfs(name, tenantCode, content); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - throw new RuntimeException(result.getMsg()); - } - return result; - } - - /** - * updateProcessInstance resource - * - * @param resourceId - * @return - */ - @Transactional(value = "TransactionManager",rollbackFor = Exception.class) - public Result updateResourceContent(int resourceId, String content) { - Result result = new Result(); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - Resource resource = resourcesMapper.selectById(resourceId); - if (resource == null) { - logger.error("read file not exist, resource id {}", resourceId); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - //check can edit by file suffix - String nameSuffix = FileUtils.suffix(resource.getAlias()); - String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); - if (StringUtils.isNotEmpty(resourceViewSuffixs)) { - List strList = Arrays.asList(resourceViewSuffixs.split(",")); - if (!strList.contains(nameSuffix)) { - logger.error("resouce suffix 
{} not support updateProcessInstance, resource id {}", nameSuffix, resourceId); - putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); - return result; - } - } - - resource.setSize(content.getBytes().length); - resource.setUpdateTime(new Date()); - resourcesMapper.updateById(resource); - - User user = userMapper.queryDetailsById(resource.getUserId()); - String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); - - result = uploadContentToHdfs(resource.getAlias(), tenantCode, content); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - throw new RuntimeException(result.getMsg()); - } - return result; - } - - /** - * @param resourceName - * @param tenantCode - * @param content - * @return - */ - private Result uploadContentToHdfs(String resourceName, String tenantCode, String content) { - Result result = new Result(); - String localFilename = ""; - String hdfsFileName = ""; - try { - localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); - - if (!FileUtils.writeContent2File(content, localFilename)) { - // write file fail - logger.error("file {} fail, content is {}", localFilename, content); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - // get file hdfs path - hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resourceName); - String resourcePath = HadoopUtils.getHdfsResDir(tenantCode); - logger.info("resource hdfs path is {} ", hdfsFileName); - - HadoopUtils hadoopUtils = HadoopUtils.getInstance(); - if (!hadoopUtils.exists(resourcePath)) { - // create if tenant dir not exists - createTenantDirIfNotExists(tenantCode); - } - if (hadoopUtils.exists(hdfsFileName)) { - hadoopUtils.delete(hdfsFileName, false); - } - - hadoopUtils.copyLocalToHdfs(localFilename, hdfsFileName, true, true); - } catch (Exception e) { - logger.error(e.getMessage(), e); - result.setCode(Status.HDFS_OPERATION_ERROR.getCode()); - result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, 
hdfsFileName)); - return result; - } - putMsg(result, Status.SUCCESS); - return result; - } - - - /** - * download file - * - * @param resourceId - * @return - */ - public org.springframework.core.io.Resource downloadResource(int resourceId) throws Exception { - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - throw new RuntimeException("hdfs not startup"); - } - - Resource resource = resourcesMapper.selectById(resourceId); - if (resource == null) { - logger.error("download file not exist, resource id {}", resourceId); - return null; - } - User user = userMapper.queryDetailsById(resource.getUserId()); - String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); - - String hdfsFileName = ""; - hdfsFileName = getHdfsFileName(resource, tenantCode, hdfsFileName); - - String localFileName = FileUtils.getDownloadFilename(resource.getAlias()); - logger.info("resource hdfs path is {} ", hdfsFileName); - - HadoopUtils.getInstance().copyHdfsToLocal(hdfsFileName, localFileName, false, true); - org.springframework.core.io.Resource file = cn.escheduler.api.utils.FileUtils.file2Resource(localFileName); - return file; - } - - - /** - * unauthorized file - * - * @param loginUser - * @param userId - * @return - */ - public Map unauthorizedFile(User loginUser, Integer userId) { - - Map result = new HashMap<>(); - if (checkAdmin(loginUser, result)) { - return result; - } - List resourceList = resourcesMapper.queryResourceExceptUserId(userId); - List list ; - if (resourceList != null && resourceList.size() > 0) { - Set resourceSet = new HashSet<>(resourceList); - List authedResourceList = resourcesMapper.queryAuthorizedResourceList(userId); - - getAuthorizedResourceList(resourceSet, authedResourceList); - list = new ArrayList<>(resourceSet); - }else { - list = new ArrayList<>(0); - } - - result.put(Constants.DATA_LIST, list); - 
putMsg(result,Status.SUCCESS); - return result; - } - - - - - /** - * unauthorized udf function - * - * @param loginUser - * @param userId - * @return - */ - public Map unauthorizedUDFFunction(User loginUser, Integer userId) { - Map result = new HashMap<>(5); - //only admin can operate - if (checkAdmin(loginUser, result)) { - return result; - } - - List udfFuncList = udfFunctionMapper.queryUdfFuncExceptUserId(userId); - List resultList = new ArrayList<>(); - Set udfFuncSet = null; - if (udfFuncList != null && udfFuncList.size() > 0) { - udfFuncSet = new HashSet<>(udfFuncList); - - List authedUDFFuncList = udfFunctionMapper.queryAuthedUdfFunc(userId); - - getAuthorizedResourceList(udfFuncSet, authedUDFFuncList); - resultList = new ArrayList<>(udfFuncSet); - } - result.put(Constants.DATA_LIST, resultList); - putMsg(result,Status.SUCCESS); - return result; - } - - - - - /** - * authorized udf function - * - * @param loginUser - * @param userId - * @return - */ - public Map authorizedUDFFunction(User loginUser, Integer userId) { - Map result = new HashMap<>(); - if (checkAdmin(loginUser, result)) { - return result; - } - List udfFuncs = udfFunctionMapper.queryAuthedUdfFunc(userId); - result.put(Constants.DATA_LIST, udfFuncs); - putMsg(result,Status.SUCCESS); - return result; - } - - - /** - * authorized file - * - * @param loginUser - * @param userId - * @return - */ - public Map authorizedFile(User loginUser, Integer userId) { - Map result = new HashMap<>(5); - if (checkAdmin(loginUser, result)){ - return result; - } - List authedResources = resourcesMapper.queryAuthorizedResourceList(userId); - - result.put(Constants.DATA_LIST, authedResources); - putMsg(result,Status.SUCCESS); - return result; - } - - /** - * get hdfs file name - * - * @param resource - * @param tenantCode - * @param hdfsFileName - * @return - */ - private String getHdfsFileName(Resource resource, String tenantCode, String hdfsFileName) { - if (resource.getType().equals(ResourceType.FILE)) { - 
hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias()); - } else if (resource.getType().equals(ResourceType.UDF)) { - hdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, resource.getAlias()); - } - return hdfsFileName; - } - - /** - * get hdfs file name - * - * @param resourceType - * @param tenantCode - * @param hdfsFileName - * @return - */ - private String getHdfsFileName(ResourceType resourceType, String tenantCode, String hdfsFileName) { - if (resourceType.equals(ResourceType.FILE)) { - hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, hdfsFileName); - } else if (resourceType.equals(ResourceType.UDF)) { - hdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, hdfsFileName); - } - return hdfsFileName; - } - - /** - * get authorized resource list - * - * @param resourceSet - * @param authedResourceList - */ - private void getAuthorizedResourceList(Set resourceSet, List authedResourceList) { - Set authedResourceSet = null; - if (authedResourceList != null && authedResourceList.size() > 0) { - authedResourceSet = new HashSet<>(authedResourceList); - resourceSet.removeAll(authedResourceSet); - - } - } - -} \ No newline at end of file diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java deleted file mode 100644 index 8e34700078..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java +++ /dev/null @@ -1,594 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - - -import cn.escheduler.api.dto.ScheduleParam; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.common.enums.FailureStrategy; -import cn.escheduler.common.enums.Priority; -import cn.escheduler.common.enums.ReleaseState; -import cn.escheduler.common.enums.WarningType; -import cn.escheduler.common.model.MasterServer; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.entity.Project; -import cn.escheduler.dao.entity.Schedule; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.mapper.ProcessDefinitionMapper; -import cn.escheduler.dao.mapper.ProjectMapper; -import cn.escheduler.dao.mapper.ScheduleMapper; -import cn.escheduler.dao.utils.cron.CronUtils; -import cn.escheduler.server.quartz.ProcessScheduleJob; -import cn.escheduler.server.quartz.QuartzExecutors; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang3.StringUtils; -import org.quartz.CronExpression; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import java.io.IOException; -import 
java.text.ParseException; -import java.util.*; - -/** - * scheduler service - */ -@Service -public class SchedulerService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(SchedulerService.class); - - @Autowired - private ProjectService projectService; - - @Autowired - private ExecutorService executorService; - - @Autowired - private MonitorService monitorService; - - @Autowired - private ProcessDao processDao; - - @Autowired - private ScheduleMapper scheduleMapper; - - @Autowired - private ProjectMapper projectMapper; - - @Autowired - private ProcessDefinitionMapper processDefinitionMapper; - - /** - * save schedule - * - * @param loginUser - * @param projectName - * @param processDefineId - * @param schedule - * @param warningType - * @param warningGroupId - * @param failureStrategy - * @return - */ - @Transactional(value = "TransactionManager", rollbackFor = Exception.class) - public Map insertSchedule(User loginUser, String projectName, Integer processDefineId, String schedule, WarningType warningType, - int warningGroupId, FailureStrategy failureStrategy, - String receivers, String receiversCc, Priority processInstancePriority, int workerGroupId) throws IOException { - - Map result = new HashMap(5); - - Project project = projectMapper.queryByName(projectName); - - // check project auth - Map checkResult = checkAuth(loginUser, projectName, project); - if (checkResult != null) { - return checkResult; - } - - // check work flow define release state - ProcessDefinition processDefinition = processDao.findProcessDefineById(processDefineId); - result = executorService.checkProcessDefinitionValid(processDefinition, processDefineId); - if (result.get(Constants.STATUS) != Status.SUCCESS) { - return result; - } - - Schedule scheduleObj = new Schedule(); - Date now = new Date(); - - scheduleObj.setProjectName(projectName); - scheduleObj.setProcessDefinitionId(processDefinition.getId()); - 
scheduleObj.setProcessDefinitionName(processDefinition.getName()); - - ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); - if (DateUtils.differSec(scheduleParam.getStartTime(),scheduleParam.getEndTime()) == 0) { - logger.warn("The start time must not be the same as the end"); - putMsg(result,Status.SCHEDULE_START_TIME_END_TIME_SAME); - return result; - } - scheduleObj.setStartTime(scheduleParam.getStartTime()); - scheduleObj.setEndTime(scheduleParam.getEndTime()); - if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { - logger.error(scheduleParam.getCrontab() + " verify failure"); - - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab()); - return result; - } - scheduleObj.setCrontab(scheduleParam.getCrontab()); - scheduleObj.setWarningType(warningType); - scheduleObj.setWarningGroupId(warningGroupId); - scheduleObj.setFailureStrategy(failureStrategy); - scheduleObj.setCreateTime(now); - scheduleObj.setUpdateTime(now); - scheduleObj.setUserId(loginUser.getId()); - scheduleObj.setUserName(loginUser.getUserName()); - scheduleObj.setReleaseState(ReleaseState.OFFLINE); - scheduleObj.setProcessInstancePriority(processInstancePriority); - scheduleObj.setWorkerGroupId(workerGroupId); - scheduleMapper.insert(scheduleObj); - - /** - * updateProcessInstance receivers and cc by process definition id - */ - processDefinition.setReceivers(receivers); - processDefinition.setReceiversCc(receiversCc); - processDefinitionMapper.updateById(processDefinition); - putMsg(result, Status.SUCCESS); - - return result; - } - - - /** - * updateProcessInstance schedule - * - * @param loginUser - * @param projectName - * @param id - * @param scheduleExpression - * @param warningType - * @param warningGroupId - * @param failureStrategy - * @param scheduleStatus - * @param workerGroupId - * @return - */ - @Transactional(value = "TransactionManager", rollbackFor = Exception.class) - public Map 
updateSchedule(User loginUser, String projectName, Integer id, String scheduleExpression, WarningType warningType, - int warningGroupId, FailureStrategy failureStrategy, - String receivers, String receiversCc, ReleaseState scheduleStatus, - Priority processInstancePriority, int workerGroupId) throws IOException { - Map result = new HashMap(5); - - Project project = projectMapper.queryByName(projectName); - - // check project auth - Map checkResult = checkAuth(loginUser, projectName, project); - if (checkResult != null) { - return checkResult; - } - - // check schedule exists - Schedule schedule = scheduleMapper.selectById(id); - - if (schedule == null) { - putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); - return result; - } - - ProcessDefinition processDefinition = processDao.findProcessDefineById(schedule.getProcessDefinitionId()); - if (processDefinition == null) { - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId()); - return result; - } - - /** - * scheduling on-line status forbid modification - */ - if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) { - return result; - } - - Date now = new Date(); - - // updateProcessInstance param - if (StringUtils.isNotEmpty(scheduleExpression)) { - ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); - if (DateUtils.differSec(scheduleParam.getStartTime(),scheduleParam.getEndTime()) == 0) { - logger.warn("The start time must not be the same as the end"); - putMsg(result,Status.SCHEDULE_START_TIME_END_TIME_SAME); - return result; - } - schedule.setStartTime(scheduleParam.getStartTime()); - schedule.setEndTime(scheduleParam.getEndTime()); - if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { - putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab()); - return result; - } - schedule.setCrontab(scheduleParam.getCrontab()); - } - - if 
(warningType != null) { - schedule.setWarningType(warningType); - } - - schedule.setWarningGroupId(warningGroupId); - - if (failureStrategy != null) { - schedule.setFailureStrategy(failureStrategy); - } - - if (scheduleStatus != null) { - schedule.setReleaseState(scheduleStatus); - } - schedule.setWorkerGroupId(workerGroupId); - schedule.setUpdateTime(now); - schedule.setProcessInstancePriority(processInstancePriority); - scheduleMapper.updateById(schedule); - - /** - * updateProcessInstance recipients and cc by process definition ID - */ - processDefinition.setReceivers(receivers); - processDefinition.setReceiversCc(receiversCc); - processDefinitionMapper.updateById(processDefinition); - - putMsg(result, Status.SUCCESS); - return result; - } - - - /** - * set schedule online or offline - * - * @param loginUser - * @param projectName - * @param id - * @param scheduleStatus - * @return - */ - @Transactional(value = "TransactionManager", rollbackFor = Exception.class) - public Map setScheduleState(User loginUser, String projectName, Integer id, ReleaseState scheduleStatus) { - - Map result = new HashMap(5); - - Project project = projectMapper.queryByName(projectName); - Map checkResult = checkAuth(loginUser, projectName, project); - if (checkResult != null) { - return checkResult; - } - - // check schedule exists - Schedule scheduleObj = scheduleMapper.selectById(id); - - if (scheduleObj == null) { - putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); - return result; - } - // check schedule release state - if(scheduleObj.getReleaseState() == scheduleStatus){ - logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}", - scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus); - putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus); - return result; - } - ProcessDefinition processDefinition = processDao.findProcessDefineById(scheduleObj.getProcessDefinitionId()); - 
if (processDefinition == null) { - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId()); - return result; - } - - if(scheduleStatus == ReleaseState.ONLINE){ - // check process definition release state - if(processDefinition.getReleaseState() != ReleaseState.ONLINE){ - logger.info("not release process definition id: {} , name : {}", - processDefinition.getId(), processDefinition.getName()); - putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, scheduleObj.getProcessDefinitionId()); - return result; - } - // check sub process definition release state - List subProcessDefineIds = new ArrayList<>(); - processDao.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); - Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]); - if (subProcessDefineIds.size() > 0){ - List subProcessDefinitionList = - processDefinitionMapper.queryDefinitionListByIdList(idArray); - if (subProcessDefinitionList != null && subProcessDefinitionList.size() > 0){ - for (ProcessDefinition subProcessDefinition : subProcessDefinitionList){ - /** - * if there is no online process, exit directly - */ - if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE){ - logger.info("not release process definition id: {} , name : {}", - subProcessDefinition.getId(), subProcessDefinition.getName()); - putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId()); - return result; - } - } - } - } - } - - // check master server exists - List masterServers = monitorService.getServerListFromZK(true); - - - if (masterServers.size() == 0) { - putMsg(result, Status.MASTER_NOT_EXISTS); - } - - // set status - scheduleObj.setReleaseState(scheduleStatus); - - scheduleMapper.updateById(scheduleObj); - - try { - switch (scheduleStatus) { - case ONLINE: { - logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}, port: {}", project.getId(), processDefinition.getId(), 
masterServers); - setSchedule(project.getId(), id); - break; - } - case OFFLINE: { - logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}, port: {}", project.getId(), processDefinition.getId(), masterServers); - deleteSchedule(project.getId(), id); - break; - } - default: { - putMsg(result, Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString()); - return result; - } - } - } catch (Exception e) { - result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? "set online failure" : "set offline failure"); - throw new RuntimeException(result.get(Constants.MSG).toString()); - } - - putMsg(result, Status.SUCCESS); - return result; - } - - - - /** - * query schedule - * - * @param loginUser - * @param projectName - * @param processDefineId - * @return - */ - public Map querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize) { - - HashMap result = new HashMap<>(); - - Project project = projectMapper.queryByName(projectName); - - // check project auth - Map checkResult = checkAuth(loginUser, projectName, project); - if (checkResult != null) { - return checkResult; - } - - ProcessDefinition processDefinition = processDao.findProcessDefineById(processDefineId); - if (processDefinition == null) { - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); - return result; - } - Page page = new Page(pageNo, pageSize); - IPage scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging( - page, processDefineId, searchVal - ); - - - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - pageInfo.setTotalCount((int)scheduleIPage.getTotal()); - pageInfo.setLists(scheduleIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * query schedule list - * - * @param loginUser - * @param projectName - * @return - */ - public Map queryScheduleList(User loginUser, String projectName) { - 
Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - // check project auth - Map checkResult = checkAuth(loginUser, projectName, project); - if (checkResult != null) { - return checkResult; - } - - List schedules = scheduleMapper.querySchedulerListByProjectName(projectName); - - result.put(Constants.DATA_LIST, schedules); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * set schedule - * - * @see - */ - public void setSchedule(int projectId, int scheduleId) throws RuntimeException{ - logger.info("set schedule, project id: {}, scheduleId: {}", projectId, scheduleId); - - - Schedule schedule = processDao.querySchedule(scheduleId); - if (schedule == null) { - logger.warn("process schedule info not exists"); - } - - Date startDate = schedule.getStartTime(); - Date endDate = schedule.getEndTime(); - - String jobName = QuartzExecutors.buildJobName(scheduleId); - String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); - - Map dataMap = QuartzExecutors.buildDataMap(projectId, scheduleId, schedule); - - QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, jobName, jobGroupName, startDate, endDate, - schedule.getCrontab(), dataMap); - - } - - /** - * delete schedule - */ - public static void deleteSchedule(int projectId, int scheduleId) throws RuntimeException{ - logger.info("delete schedules of project id:{}, schedule id:{}", projectId, scheduleId); - - String jobName = QuartzExecutors.buildJobName(scheduleId); - String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); - - if(!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)){ - logger.warn("set offline failure:projectId:{},scheduleId:{}",projectId,scheduleId); - throw new RuntimeException(String.format("set offline failure")); - } - - } - - /** - * check valid - * - * @param result - * @param bool - * @param status - * @return - */ - private boolean checkValid(Map result, boolean bool, Status status) { - // timeout 
is valid - if (bool) { - putMsg(result, status); - return true; - } - return false; - } - - /** - * - * @param loginUser - * @param projectName - * @param project - * @return - */ - private Map checkAuth(User loginUser, String projectName, Project project) { - // check project auth - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - return null; - } - - /** - * delete schedule by id - * - * @param loginUser - * @param projectName - * @param scheduleId - * @return - */ - public Map deleteScheduleById(User loginUser, String projectName, Integer scheduleId) { - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - Schedule schedule = scheduleMapper.selectById(scheduleId); - - if (schedule == null) { - putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId); - return result; - } - - // Determine if the login user is the owner of the schedule - if (loginUser.getId() != schedule.getUserId()) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - // check schedule is already online - if(schedule.getReleaseState() == ReleaseState.ONLINE){ - putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE,schedule.getId()); - return result; - } - - - int delete = scheduleMapper.deleteById(scheduleId); - - if (delete > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR); - } - return result; - } - - /** - * preview schedule - * @param loginUser - * @param projectName - * @param schedule - * @return - */ - public Map previewSchedule(User loginUser, String projectName, String schedule) { 
- Map result = new HashMap<>(5); - CronExpression cronExpression; - ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); - Date now = new Date(); - - Date startTime = now.after(scheduleParam.getStartTime()) ? now : scheduleParam.getStartTime(); - Date endTime = scheduleParam.getEndTime(); - try { - cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab()); - } catch (ParseException e) { - logger.error(e.getMessage(),e); - putMsg(result,Status.PARSE_TO_CRON_EXPRESSION_ERROR); - return result; - } - List selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime,cronExpression); - result.put(Constants.DATA_LIST, selfFireDateList.stream().map(t -> DateUtils.dateToString(t)).limit(cn.escheduler.common.Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT)); - putMsg(result, Status.SUCCESS); - return result; - } -} \ No newline at end of file diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/SessionService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/SessionService.java deleted file mode 100644 index 9408ab9c6c..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/SessionService.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - - -import cn.escheduler.api.controller.BaseController; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.dao.entity.Session; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.mapper.SessionMapper; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; -import java.util.Date; -import java.util.List; -import java.util.UUID; - -/** - * session service - */ -@Service -public class SessionService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(SessionService.class); - - @Autowired - private SessionMapper sessionMapper; - - /** - * get user session from request - * - * @param request - * @return - */ - public Session getSession(HttpServletRequest request) { - String sessionId = request.getHeader(Constants.SESSION_ID); - - if(StringUtils.isBlank(sessionId)) { - Cookie cookie = getCookie(request, Constants.SESSION_ID); - - if (cookie != null) { - sessionId = cookie.getValue(); - } - } - - if(StringUtils.isBlank(sessionId)) { - return null; - } - - String ip = BaseController.getClientIpAddress(request); - logger.debug("get session: {}, ip: {}", sessionId, ip); - - return sessionMapper.selectById(sessionId); - } - - /** - * create session - * - * @param user - * @param ip - * @return - */ - public String createSession(User user, String ip) { - Session session = null; - - // logined - List sessionList = sessionMapper.queryByUserId(user.getId()); - - Date now = new Date(); - - /** - * if you have logged in and are still valid, return directly - */ - if 
(CollectionUtils.isNotEmpty(sessionList)) { - // is session list greater 1 , delete other ,get one - if (sessionList.size() > 1){ - for (int i=1 ; i < sessionList.size();i++){ - sessionMapper.deleteById(sessionList.get(i).getId()); - } - } - session = sessionList.get(0); - if (now.getTime() - session.getLastLoginTime().getTime() <= Constants.SESSION_TIME_OUT * 1000) { - /** - * updateProcessInstance the latest login time - */ - session.setLastLoginTime(now); - sessionMapper.updateById(session); - - return session.getId(); - - } else { - /** - * session expired, then delete this session first - */ - sessionMapper.deleteById(session.getId()); - } - } - - // assign new session - session = new Session(); - - session.setId(UUID.randomUUID().toString()); - session.setIp(ip); - session.setUserId(user.getId()); - session.setLastLoginTime(now); - - sessionMapper.insert(session); - - return session.getId(); - } - - /** - * sign out - * remove ip restrictions - * - * @param ip no use - * @param loginUser - */ - public void signOut(String ip, User loginUser) { - /** - * query session by user id and ip - */ - List sessionList = sessionMapper.queryByUserId(loginUser.getId()); - - for (Session session : sessionList){ - //delete session - sessionMapper.deleteById(session.getId()); - } - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/TaskInstanceService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/TaskInstanceService.java deleted file mode 100644 index 6acb6d0133..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/TaskInstanceService.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.Project; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.mapper.ProjectMapper; -import cn.escheduler.dao.mapper.TaskInstanceMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.text.MessageFormat; -import java.util.*; - -/** - * task instance service - */ -@Service -public class TaskInstanceService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(TaskInstanceService.class); - - @Autowired - ProjectMapper projectMapper; - - @Autowired - ProjectService projectService; - - @Autowired - ProcessDao processDao; - - @Autowired - TaskInstanceMapper 
taskInstanceMapper; - - - /** - * query task list by project, process instance, task name, task start time, task end time, task status, keyword paging - * - * @param loginUser - * @param projectName - * @param processInstanceId - * @param taskName - * @param startDate - * @param endDate - * @param searchVal - * @param stateType - * @param pageNo - * @param pageSize - * @return - */ - public Map queryTaskListPaging(User loginUser, String projectName, - Integer processInstanceId, String taskName, String startDate, String endDate, - String searchVal, ExecutionStatus stateType,String host, - Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status status = (Status) checkResult.get(Constants.STATUS); - if (status != Status.SUCCESS) { - return checkResult; - } - - int[] statusArray = null; - String statesStr = null; - // filter by status - if(stateType != null){ - statusArray = new int[]{stateType.ordinal()}; - } - if(statusArray != null){ - statesStr = Arrays.toString(statusArray).replace("[", "").replace("]",""); - } - - Date start = null; - Date end = null; - try { - if(StringUtils.isNotEmpty(startDate)){ - start = DateUtils.getScheduleDate(startDate); - } - if(StringUtils.isNotEmpty( endDate)){ - end = DateUtils.getScheduleDate(endDate); - } - } catch (Exception e) { - result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate")); - return result; - } - - Page page = new Page(pageNo, pageSize); - IPage taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging( - page, project.getId(), processInstanceId, searchVal, taskName, statesStr, host, start, end - ); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - Set exclusionSet = new HashSet(){{ - 
add(Constants.CLASS); - add("taskJson"); - }}; - pageInfo.setTotalCount((int)taskInstanceIPage.getTotal()); - pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(),exclusionSet)); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/TaskRecordService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/TaskRecordService.java deleted file mode 100644 index 32c972dcbf..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/TaskRecordService.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.dao.TaskRecordDao; -import cn.escheduler.dao.entity.TaskRecord; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Service; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static cn.escheduler.common.Constants.*; - -/** - * task record service - */ -@Service -public class TaskRecordService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(TaskRecordService.class); - - /** - * query task record list paging - * - * @param taskName - * @param startDate - * @param taskDate - * @param sourceTable - * @param destTable - * @param endDate - * @param state - * @param pageNo - * @param pageSize - * @return - */ - public Map queryTaskRecordListPaging(boolean isHistory, String taskName, String startDate, - String taskDate, String sourceTable, - String destTable, String endDate, - String state, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(10); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - - Map map = new HashMap<>(10); - map.put("taskName", taskName); - map.put("taskDate", taskDate); - map.put("state", state); - map.put("sourceTable", sourceTable); - map.put("targetTable", destTable); - map.put("startTime", startDate); - map.put("endTime", endDate); - map.put("offset", pageInfo.getStart().toString()); - map.put("pageSize", pageInfo.getPageSize().toString()); - - String table = isHistory ? 
TASK_RECORD_TABLE_HISTORY_HIVE_LOG : TASK_RECORD_TABLE_HIVE_LOG; - int count = TaskRecordDao.countTaskRecord(map, table); - List recordList = TaskRecordDao.queryAllTaskRecord(map, table); - pageInfo.setTotalCount(count); - pageInfo.setLists(recordList); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java deleted file mode 100644 index 8599b0ec60..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java +++ /dev/null @@ -1,300 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.HadoopUtils; -import cn.escheduler.common.utils.PropertyUtils; -import cn.escheduler.dao.entity.Tenant; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.mapper.TenantMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.fs.FileStatus; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * tenant service - */ -@Service -public class TenantService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(TenantService.class); - - @Autowired - private TenantMapper tenantMapper; - - /** - * create tenant - * - * @param loginUser - * @param tenantCode - * @param tenantName - * @param queueId - * @param desc - * @return - */ - @Transactional(value = "TransactionManager",rollbackFor = Exception.class) - public Map createTenant(User loginUser, - String tenantCode, - String tenantName, - int queueId, - String desc) throws Exception { - - Map result = new HashMap<>(5); - result.put(Constants.STATUS, false); - if (checkAdmin(loginUser, result)) { - return result; - } - - if (!checkTenant(tenantCode)){ - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, tenantCode); - return result; - } - - - Tenant tenant = new Tenant(); - Date now = new Date(); - - if (!tenantCode.matches("^[0-9a-zA-Z_.-]{1,}$") || tenantCode.startsWith("-") || tenantCode.startsWith(".")){ 
- putMsg(result, Status.VERIFY_TENANT_CODE_ERROR); - return result; - } - tenant.setTenantCode(tenantCode); - tenant.setTenantName(tenantName); - tenant.setQueueId(queueId); - tenant.setDescription(desc); - tenant.setCreateTime(now); - tenant.setUpdateTime(now); - - // save - tenantMapper.insert(tenant); - - // if hdfs startup - if (PropertyUtils.getResUploadStartupState()){ - createTenantDirIfNotExists(tenantCode); - } - - putMsg(result, Status.SUCCESS); - - return result; -} - - - - /** - * query tenant list paging - * - * @param loginUser - * @param searchVal - * @param pageNo - * @param pageSize - * @return - */ - public Map queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - - Map result = new HashMap<>(5); - if (checkAdmin(loginUser, result)) { - return result; - } - - Page page = new Page(pageNo, pageSize); - IPage tenantIPage = tenantMapper.queryTenantPaging(page, searchVal); - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotalCount((int)tenantIPage.getTotal()); - pageInfo.setLists(tenantIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * updateProcessInstance tenant - * - * @param loginUser - * @param tenantCode - * @param tenantName - * @param queueId - * @param desc - * @return - */ - public Map updateTenant(User loginUser,int id,String tenantCode, String tenantName, int queueId, String desc) throws Exception { - - Map result = new HashMap<>(5); - result.put(Constants.STATUS, false); - - if (checkAdmin(loginUser, result)) { - return result; - } - - Tenant tenant = tenantMapper.queryById(id); - - if (tenant == null){ - putMsg(result, Status.TENANT_NOT_EXIST); - return result; - } - - // updateProcessInstance tenant - /** - * if the tenant code is modified, the original resource needs to be copied to the new tenant. 
- */ - if (!tenant.getTenantCode().equals(tenantCode)){ - if (checkTenant(tenantCode)){ - // if hdfs startup - if (PropertyUtils.getResUploadStartupState()){ - String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources"; - String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); - //init hdfs resource - HadoopUtils.getInstance().mkdir(resourcePath); - HadoopUtils.getInstance().mkdir(udfsPath); - } - }else { - putMsg(result, Status.TENANT_CODE_HAS_ALREADY_EXISTS); - return result; - } - } - - Date now = new Date(); - - if (StringUtils.isNotEmpty(tenantCode)){ - tenant.setTenantCode(tenantCode); - } - - if (StringUtils.isNotEmpty(tenantName)){ - tenant.setTenantName(tenantName); - } - - if (queueId != 0){ - tenant.setQueueId(queueId); - } - tenant.setDescription(desc); - tenant.setUpdateTime(now); - tenantMapper.updateById(tenant); - - result.put(Constants.STATUS, Status.SUCCESS); - result.put(Constants.MSG, Status.SUCCESS.getMsg()); - return result; - } - - /** - * delete tenant - * - * @param loginUser - * @param id - * @return - */ - @Transactional(value = "TransactionManager", rollbackFor = Exception.class) - public Map deleteTenantById(User loginUser, int id) throws Exception { - Map result = new HashMap<>(5); - - if (checkAdmin(loginUser, result)) { - return result; - } - - Tenant tenant = tenantMapper.queryById(id); - - if (tenant == null){ - putMsg(result, Status.TENANT_NOT_EXIST); - return result; - } - - // if resource upload startup - if (PropertyUtils.getResUploadStartupState()){ - String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode(); - - if (HadoopUtils.getInstance().exists(tenantPath)){ - String resourcePath = HadoopUtils.getHdfsResDir(tenant.getTenantCode()); - FileStatus[] fileStatus = HadoopUtils.getInstance().listFileStatus(resourcePath); - if (fileStatus.length > 0) { - putMsg(result, Status.HDFS_TERANT_RESOURCES_FILE_EXISTS); - return result; - } - fileStatus = 
HadoopUtils.getInstance().listFileStatus(HadoopUtils.getHdfsUdfDir(tenant.getTenantCode())); - if (fileStatus.length > 0) { - putMsg(result, Status.HDFS_TERANT_UDFS_FILE_EXISTS); - return result; - } - - HadoopUtils.getInstance().delete(tenantPath, true); - } - } - - tenantMapper.deleteById(id); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * query tenant list - * - * @param loginUser - * @return - */ - public Map queryTenantList(User loginUser) { - - Map result = new HashMap<>(5); - - List resourceList = tenantMapper.selectList(null); - result.put(Constants.DATA_LIST, resourceList); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * verify tenant code - * - * @param tenantCode - * @return - */ - public Result verifyTenantCode(String tenantCode) { - Result result=new Result(); - if (checkTenant(tenantCode)) { - logger.error("tenant {} has exist, can't create again.", tenantCode); - putMsg(result, Status.TENANT_NAME_EXIST); - }else{ - putMsg(result, Status.SUCCESS); - } - return result; - } - - - /** - * check tenant exists - * - * @param tenantCode - * @return - */ - private boolean checkTenant(String tenantCode) { - return tenantMapper.queryByTenantCode(tenantCode) == null ? true : false; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/UdfFuncService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/UdfFuncService.java deleted file mode 100644 index 202a24291f..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/UdfFuncService.java +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.UdfType; -import cn.escheduler.common.utils.PropertyUtils; -import cn.escheduler.dao.entity.Resource; -import cn.escheduler.dao.entity.UdfFunc; -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.mapper.ResourceMapper; -import cn.escheduler.dao.mapper.UDFUserMapper; -import cn.escheduler.dao.mapper.UdfFuncMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * udf function service - */ -@Service -public class UdfFuncService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(UdfFuncService.class); - - @Autowired - private ResourceMapper resourceMapper; - - @Autowired - private UdfFuncMapper udfFuncMapper; - - @Autowired - private UDFUserMapper udfUserMapper; - - - /** - * create udf function - * - * @param loginUser - * @param funcName - * @param argTypes - * @param database - * @param desc - * @param type - * 
@param resourceId - * @return - */ - public Result createUdfFunction(User loginUser, - String funcName, - String className, - String argTypes, - String database, - String desc, - UdfType type, - int resourceId) { - Result result = new Result(); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - // verify udf func name exist - if (checkUdfFuncNameExists(funcName)) { - logger.error("udf func {} has exist, can't recreate", funcName); - putMsg(result, Status.UDF_FUNCTION_EXISTS); - return result; - } - - Resource resource = resourceMapper.selectById(resourceId); - if (resource == null) { - logger.error("resourceId {} is not exist", resourceId); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - //save data - UdfFunc udf = new UdfFunc(); - Date now = new Date(); - udf.setUserId(loginUser.getId()); - udf.setFuncName(funcName); - udf.setClassName(className); - if (StringUtils.isNotEmpty(argTypes)) { - udf.setArgTypes(argTypes); - } - if (StringUtils.isNotEmpty(argTypes)) { - udf.setDatabase(database); - } - udf.setDesc(desc); - udf.setResourceId(resourceId); - udf.setResourceName(resource.getAlias()); - udf.setType(type); - - udf.setCreateTime(now); - udf.setUpdateTime(now); - - udfFuncMapper.insert(udf); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * - * @param name - * @return - */ - private boolean checkUdfFuncNameExists(String name){ - List resource = udfFuncMapper.queryUdfByIdStr(null, name); - if(resource != null && resource.size() > 0){ - return true; - } - return false; - } - - - /** - * query udf function - */ - public Map queryUdfFuncDetail(int id) { - - Map result = new HashMap<>(5); - UdfFunc udfFunc = udfFuncMapper.selectById(id); - if (udfFunc == null) { - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - 
result.put(Constants.DATA_LIST, udfFunc); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * updateProcessInstance udf function - * - * @param funcName - * @param argTypes - * @param database - * @param desc - * @param type - * @param resourceId - * @return - */ - public Map updateUdfFunc(int udfFuncId, - String funcName, - String className, - String argTypes, - String database, - String desc, - UdfType type, - int resourceId) { - Map result = new HashMap<>(); - // verify udfFunc is exist - UdfFunc udf = udfFuncMapper.selectById(udfFuncId); - - // if resource upload startup - if (!PropertyUtils.getResUploadStartupState()){ - logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); - putMsg(result, Status.HDFS_NOT_STARTUP); - return result; - } - - if (udf == null) { - result.put(Constants.STATUS, Status.UDF_FUNCTION_NOT_EXIST); - result.put(Constants.MSG, Status.UDF_FUNCTION_NOT_EXIST.getMsg()); - return result; - } - - // verify udfFuncName is exist - if (!funcName.equals(udf.getFuncName())) { - if (checkUdfFuncNameExists(funcName)) { - logger.error("UdfFunc {} has exist, can't create again.", funcName); - result.put(Constants.STATUS, Status.UDF_FUNCTION_EXISTS); - result.put(Constants.MSG, Status.UDF_FUNCTION_EXISTS.getMsg()); - return result; - } - } - - Resource resource = resourceMapper.selectById(resourceId); - if (resource == null) { - logger.error("resourceId {} is not exist", resourceId); - result.put(Constants.STATUS, Status.RESOURCE_NOT_EXIST); - result.put(Constants.MSG, Status.RESOURCE_NOT_EXIST.getMsg()); - return result; - } - Date now = new Date(); - udf.setFuncName(funcName); - udf.setClassName(className); - if (StringUtils.isNotEmpty(argTypes)) { - udf.setArgTypes(argTypes); - } - if (StringUtils.isNotEmpty(argTypes)) { - udf.setDatabase(database); - } - udf.setDesc(desc); - udf.setResourceId(resourceId); - udf.setResourceName(resource.getAlias()); - udf.setType(type); - - - 
udf.setCreateTime(now); - udf.setUpdateTime(now); - - udfFuncMapper.updateById(udf); - putMsg(result, Status.SUCCESS); - return result; - } - - - /** - * query udf function list paging - * - * @param loginUser - * @param searchVal - * @param pageNo - * @param pageSize - * @return - */ - public Map queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); - - - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - IPage udfFuncList = getUdfFuncsPage(loginUser, searchVal, pageSize, pageNo); - pageInfo.setTotalCount((int)udfFuncList.getTotal()); - pageInfo.setLists(udfFuncList.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * get udf functions - * - * @param loginUser - * @param searchVal - * @param pageSize - * @param pageNo - * @return - */ - private IPage getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) { - - int userId = loginUser.getId(); - if (isAdmin(loginUser)) { - userId = 0; - } - Page page = new Page(pageNo, pageSize); - return udfFuncMapper.queryUdfFuncPaging(page, userId, searchVal); - } - - /** - * query data resource by type - * - * @param loginUser - * @param type - * @return - */ - public Map queryResourceList(User loginUser, Integer type) { - Map result = new HashMap<>(5); - List udfFuncList = udfFuncMapper.getUdfFuncByType(loginUser.getId(), type); - - result.put(Constants.DATA_LIST, udfFuncList); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * delete udf function - * - * @param id - */ - @Transactional(value = "TransactionManager", rollbackFor = Exception.class) - public Result delete(int id) { - Result result = new Result(); - - udfFuncMapper.deleteById(id); - udfUserMapper.deleteByUdfFuncId(id); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * verify udf function by name - * - * @param name - * @return - */ - public Result 
verifyUdfFuncByName(String name) { - Result result = new Result(); - if (checkUdfFuncNameExists(name)) { - logger.error("UDF function name:{} has exist, can't create again.", name); - putMsg(result, Status.UDF_FUNCTION_EXISTS); - } else { - putMsg(result, Status.SUCCESS); - } - - return result; - } - -} \ No newline at end of file diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/UsersService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/UsersService.java deleted file mode 100644 index a32cfe194a..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/UsersService.java +++ /dev/null @@ -1,694 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.CheckUtils; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.ResourceType; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.EncryptionUtils; -import cn.escheduler.common.utils.HadoopUtils; -import cn.escheduler.common.utils.PropertyUtils; -import cn.escheduler.dao.entity.*; -import cn.escheduler.dao.mapper.*; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import java.util.*; - -/** - * user service - */ -@Service -public class UsersService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(UsersService.class); - - @Autowired - private UserMapper userMapper; - - @Autowired - private TenantMapper tenantMapper; - - @Autowired - private ProjectUserMapper projectUserMapper; - - @Autowired - private ResourceUserMapper resourcesUserMapper; - - @Autowired - private ResourceMapper resourceMapper; - - @Autowired - private DataSourceUserMapper datasourceUserMapper; - - @Autowired - private UDFUserMapper udfUserMapper; - - @Autowired - private AlertGroupMapper alertGroupMapper; - - - /** - * create user, only system admin have permission - * - * @param loginUser - * @param userName - * @param userPassword - * @param email - * @param tenantId - * @param phone - * @return - */ - @Transactional(value = "TransactionManager", rollbackFor = Exception.class) - public Map createUser(User 
loginUser, - String userName, - String userPassword, - String email, - int tenantId, - String phone, - String queue) throws Exception { - - Map result = new HashMap<>(5); - result = CheckUtils.checkUserParams(userName, userPassword, email, phone); - if (result.get(Constants.STATUS) != Status.SUCCESS) { - return result; - } - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { - return result; - } - - if (check(result, checkTenant(tenantId), Status.TENANT_NOT_EXIST, Constants.STATUS)) { - return result; - } - - User user = new User(); - Date now = new Date(); - - user.setUserName(userName); - user.setUserPassword(EncryptionUtils.getMd5(userPassword)); - user.setEmail(email); - user.setTenantId(tenantId); - user.setPhone(phone); - // create general users, administrator users are currently built-in - user.setUserType(UserType.GENERAL_USER); - user.setCreateTime(now); - user.setUpdateTime(now); - if (StringUtils.isEmpty(queue)){ - queue = ""; - } - user.setQueue(queue); - - // save user - userMapper.insert(user); - - Tenant tenant = tenantMapper.queryById(tenantId); - // resource upload startup - if (PropertyUtils.getResUploadStartupState()){ - // if tenant not exists - if (!HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(tenant.getTenantCode()))){ - createTenantDirIfNotExists(tenant.getTenantCode()); - } - String userPath = HadoopUtils.getHdfsUserDir(tenant.getTenantCode(),user.getId()); - HadoopUtils.getInstance().mkdir(userPath); - } - - putMsg(result, Status.SUCCESS); - return result; - - } - - /** - * query user - * - * @param name - * @param password - * @return - */ - public User queryUser(String name, String password) { - String md5 = EncryptionUtils.getMd5(password); - return userMapper.queryUserByNamePassword(name, md5); - } - - /** - * check general user or not - * - * @param user - * @return - */ - public boolean isGeneral(User user) { - return user.getUserType() == UserType.GENERAL_USER; - } - - /** - * 
query user list - * - * @param loginUser - * @param searchVal - * @param pageNo - * @param pageSize - * @return - */ - public Map queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); - - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { - return result; - } - - Page page = new Page(pageNo, pageSize); - - IPage scheduleList = userMapper.queryUserPaging(page, searchVal); - - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotalCount((int)scheduleList.getTotal()); - pageInfo.setLists(scheduleList.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * updateProcessInstance user - * - * @param userId - * @param userName - * @param userPassword - * @param email - * @param tenantId - * @param phone - * @return - */ - public Map updateUser(int userId, - String userName, - String userPassword, - String email, - int tenantId, - String phone, - String queue) throws Exception { - Map result = new HashMap<>(5); - result.put(Constants.STATUS, false); - - User user = userMapper.selectById(userId); - - if (user == null) { - putMsg(result, Status.USER_NOT_EXIST, userId); - return result; - } - - Date now = new Date(); - - if (StringUtils.isNotEmpty(userName)) { - User tempUser = userMapper.queryByUserNameAccurately(userName); - if (tempUser != null && tempUser.getId() != userId) { - putMsg(result, Status.USER_NAME_EXIST); - return result; - } - user.setUserName(userName); - } - - if (StringUtils.isNotEmpty(userPassword)) { - user.setUserPassword(EncryptionUtils.getMd5(userPassword)); - } - - if (StringUtils.isNotEmpty(email)) { - user.setEmail(email); - } - user.setQueue(queue); - user.setPhone(phone); - user.setUpdateTime(now); - - //if user switches the tenant, the user's resources need to be copied to the new tenant - if (user.getTenantId() != tenantId) { - Tenant oldTenant = 
tenantMapper.queryById(user.getTenantId()); - //query tenant - Tenant newTenant = tenantMapper.queryById(tenantId); - if (newTenant != null) { - // if hdfs startup - if (PropertyUtils.getResUploadStartupState() && oldTenant != null){ - String newTenantCode = newTenant.getTenantCode(); - String oldResourcePath = HadoopUtils.getHdfsResDir(oldTenant.getTenantCode()); - String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode()); - - // if old tenant dir exists - if (HadoopUtils.getInstance().exists(oldResourcePath)){ - String newResourcePath = HadoopUtils.getHdfsResDir(newTenantCode); - String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode); - - //file resources list - List fileResourcesList = resourceMapper.queryResourceList( - null, userId, ResourceType.FILE.ordinal()); - if (CollectionUtils.isNotEmpty(fileResourcesList)) { - for (Resource resource : fileResourcesList) { - HadoopUtils.getInstance().copy(oldResourcePath + "/" + resource.getAlias(), newResourcePath, false, true); - } - } - - //udf resources - List udfResourceList = resourceMapper.queryResourceList( - null, userId, ResourceType.UDF.ordinal()); - if (CollectionUtils.isNotEmpty(udfResourceList)) { - for (Resource resource : udfResourceList) { - HadoopUtils.getInstance().copy(oldUdfsPath + "/" + resource.getAlias(), newUdfsPath, false, true); - } - } - - //Delete the user from the old tenant directory - String oldUserPath = HadoopUtils.getHdfsUserDir(oldTenant.getTenantCode(),userId); - HadoopUtils.getInstance().delete(oldUserPath, true); - }else { - // if old tenant dir not exists , create - createTenantDirIfNotExists(oldTenant.getTenantCode()); - } - - if (HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(newTenant.getTenantCode()))){ - //create user in the new tenant directory - String newUserPath = HadoopUtils.getHdfsUserDir(newTenant.getTenantCode(),user.getId()); - HadoopUtils.getInstance().mkdir(newUserPath); - }else { - // if new tenant dir not exists , create - 
createTenantDirIfNotExists(newTenant.getTenantCode()); - } - - } - } - user.setTenantId(tenantId); - } - - // updateProcessInstance user - userMapper.updateById(user); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * delete user - * - * @param loginUser - * @param id - * @return - */ - public Map deleteUserById(User loginUser, int id) throws Exception { - Map result = new HashMap<>(5); - //only admin can operate - if (!isAdmin(loginUser)) { - putMsg(result, Status.USER_NOT_EXIST, id); - return result; - } - - // delete user - User user = userMapper.queryTenantCodeByUserId(id); - - if (user != null) { - if (PropertyUtils.getResUploadStartupState()) { - String userPath = HadoopUtils.getHdfsUserDir(user.getTenantCode(),id); - if (HadoopUtils.getInstance().exists(userPath)) { - HadoopUtils.getInstance().delete(userPath, true); - } - } - } - - userMapper.deleteById(id); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * grant project - * - * @param loginUser - * @param userId - * @param projectIds - * @return - */ - public Map grantProject(User loginUser, int userId, String projectIds) { - Map result = new HashMap<>(5); - result.put(Constants.STATUS, false); - - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { - return result; - } - - //if the selected projectIds are empty, delete all items associated with the user - projectUserMapper.deleteProjectRelation(0, userId); - - if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS, Constants.MSG)) { - return result; - } - - String[] projectIdArr = projectIds.split(","); - - for (String projectId : projectIdArr) { - Date now = new Date(); - ProjectUser projectUser = new ProjectUser(); - projectUser.setUserId(userId); - projectUser.setProjectId(Integer.parseInt(projectId)); - projectUser.setPerm(7); - projectUser.setCreateTime(now); - projectUser.setUpdateTime(now); - projectUserMapper.insert(projectUser); - } - 
- putMsg(result, Status.SUCCESS); - - return result; - } - - - /** - * grant resource - * - * @param loginUser - * @param userId - * @param resourceIds - * @return - */ - public Map grantResources(User loginUser, int userId, String resourceIds) { - Map result = new HashMap<>(5); - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { - return result; - } - User user = userMapper.selectById(userId); - if(user == null){ - putMsg(result, Status.USER_NOT_EXIST, userId); - return result; - } - - resourcesUserMapper.deleteResourceUser(userId, 0); - - if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS, Constants.MSG)) { - return result; - } - - String[] resourcesIdArr = resourceIds.split(","); - - for (String resourceId : resourcesIdArr) { - Date now = new Date(); - ResourcesUser resourcesUser = new ResourcesUser(); - resourcesUser.setUserId(userId); - resourcesUser.setResourcesId(Integer.parseInt(resourceId)); - resourcesUser.setPerm(7); - resourcesUser.setCreateTime(now); - resourcesUser.setUpdateTime(now); - resourcesUserMapper.insert(resourcesUser); - } - - putMsg(result, Status.SUCCESS); - - return result; - } - - - /** - * grant udf function - * - * @param loginUser - * @param userId - * @param udfIds - * @return - */ - public Map grantUDFFunction(User loginUser, int userId, String udfIds) { - Map result = new HashMap<>(5); - - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { - return result; - } - - udfUserMapper.deleteByUserId(userId); - - if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS, Constants.MSG)) { - return result; - } - - String[] resourcesIdArr = udfIds.split(","); - - for (String udfId : resourcesIdArr) { - Date now = new Date(); - UDFUser udfUser = new UDFUser(); - udfUser.setUserId(userId); - udfUser.setUdfId(Integer.parseInt(udfId)); - udfUser.setPerm(7); - udfUser.setCreateTime(now); - 
udfUser.setUpdateTime(now); - udfUserMapper.insert(udfUser); - } - - putMsg(result, Status.SUCCESS); - - return result; - } - - - /** - * grant datasource - * - * @param loginUser - * @param userId - * @param datasourceIds - * @return - */ - public Map grantDataSource(User loginUser, int userId, String datasourceIds) { - Map result = new HashMap<>(5); - result.put(Constants.STATUS, false); - - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { - return result; - } - - datasourceUserMapper.deleteByUserId(userId); - - if (check(result, StringUtils.isEmpty(datasourceIds), Status.SUCCESS, Constants.MSG)) { - return result; - } - - String[] datasourceIdArr = datasourceIds.split(","); - - for (String datasourceId : datasourceIdArr) { - Date now = new Date(); - - DatasourceUser datasourceUser = new DatasourceUser(); - datasourceUser.setUserId(userId); - datasourceUser.setDatasourceId(Integer.parseInt(datasourceId)); - datasourceUser.setPerm(7); - datasourceUser.setCreateTime(now); - datasourceUser.setUpdateTime(now); - datasourceUserMapper.insert(datasourceUser); - } - - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * query user info - * - * @param loginUser - * @return - */ - public Map getUserInfo(User loginUser) { - - Map result = new HashMap<>(); - - User user = null; - if (loginUser.getUserType() == UserType.ADMIN_USER) { - user = loginUser; - } else { - user = userMapper.queryDetailsById(loginUser.getId()); - - List alertGroups = alertGroupMapper.queryByUserId(loginUser.getId()); - - StringBuilder sb = new StringBuilder(); - - if (alertGroups != null && alertGroups.size() > 0) { - for (int i = 0; i < alertGroups.size() - 1; i++) { - sb.append(alertGroups.get(i).getGroupName() + ","); - } - sb.append(alertGroups.get(alertGroups.size() - 1)); - user.setAlertGroup(sb.toString()); - } - } - - result.put(Constants.DATA_LIST, user); - - putMsg(result, Status.SUCCESS); - return result; - 
} - - /** - * query user list - * - * @param loginUser - * @return - */ - public Map queryAllGeneralUsers(User loginUser) { - Map result = new HashMap<>(5); - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { - return result; - } - - List userList = userMapper.queryAllGeneralUser(); - result.put(Constants.DATA_LIST, userList); - putMsg(result, Status.SUCCESS); - - return result; - } - - - /** - * query user list - * - * @param loginUser - * @return - */ - public Map queryUserList(User loginUser) { - Map result = new HashMap<>(5); - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { - return result; - } - - List userList = userMapper.selectList(null ); - result.put(Constants.DATA_LIST, userList); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * verify user name exists - * - * @param userName - * @return - */ - public Result verifyUserName(String userName) { - - cn.escheduler.api.utils.Result result = new cn.escheduler.api.utils.Result(); - User user = userMapper.queryByUserNameAccurately(userName); - if (user != null) { - logger.error("user {} has exist, can't create again.", userName); - - putMsg(result, Status.USER_NAME_EXIST); - } else { - putMsg(result, Status.SUCCESS); - } - - return result; - } - - - /** - * unauthorized user - * - * @param loginUser - * @param alertgroupId - * @return - */ - public Map unauthorizedUser(User loginUser, Integer alertgroupId) { - - Map result = new HashMap<>(5); - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { - return result; - } - - List userList = userMapper.selectList(null); - List resultUsers = new ArrayList<>(); - Set userSet = null; - if (userList != null && userList.size() > 0) { - userSet = new HashSet<>(userList); - - List authedUserList = userMapper.queryUserListByAlertGroupId(alertgroupId); - - 
Set authedUserSet = null; - if (authedUserList != null && authedUserList.size() > 0) { - authedUserSet = new HashSet<>(authedUserList); - userSet.removeAll(authedUserSet); - } - resultUsers = new ArrayList<>(userSet); - } - result.put(Constants.DATA_LIST, resultUsers); - putMsg(result, Status.SUCCESS); - - return result; - } - - - /** - * authorized user - * - * @param loginUser - * @param alertgroupId - * @return - */ - public Map authorizedUser(User loginUser, Integer alertgroupId) { - Map result = new HashMap<>(5); - //only admin can operate - if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { - return result; - } - List userList = userMapper.queryUserListByAlertGroupId(alertgroupId); - result.put(Constants.DATA_LIST, userList); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * check - * - * @param result - * @param bool - * @param userNoOperationPerm - * @param status - * @return - */ - private boolean check(Map result, boolean bool, Status userNoOperationPerm, String status) { - //only admin can operate - if (bool) { - result.put(Constants.STATUS, userNoOperationPerm); - result.put(status, userNoOperationPerm.getMsg()); - return true; - } - return false; - } - - /** - * @param tenantId - * @return - */ - private boolean checkTenant(int tenantId) { - return tenantMapper.queryById(tenantId) == null ? true : false; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/WorkerGroupService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/WorkerGroupService.java deleted file mode 100644 index cdbc53bdbc..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/WorkerGroupService.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.dao.entity.WorkerGroup; -import cn.escheduler.dao.mapper.WorkerGroupMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang3.StringUtils; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.lang.invoke.WrongMethodTypeException; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * work group service - */ -@Service -public class WorkerGroupService extends BaseService { - - - @Autowired - WorkerGroupMapper workerGroupMapper; - - /** - * create or update a worker group - * @param id - * @param name - * @param ipList - * @return - */ - public Map saveWorkerGroup(int id, String name, String ipList){ - - Map result = new HashMap<>(5); - - if(StringUtils.isEmpty(name)){ - putMsg(result, Status.NAME_NULL); - return result; - } - Date now = new Date(); - WorkerGroup workerGroup = null; - if(id != 0){ - workerGroup = workerGroupMapper.selectById(id); - }else{ - workerGroup = new WorkerGroup(); - workerGroup.setCreateTime(now); - } - workerGroup.setName(name); - 
workerGroup.setIpList(ipList); - workerGroup.setUpdateTime(now); - - if(checkWorkerGroupNameExists(workerGroup)){ - putMsg(result, Status.NAME_EXIST, workerGroup.getName()); - return result; - } - if(workerGroup.getId() != 0 ){ - workerGroupMapper.updateById(workerGroup); - }else{ - workerGroupMapper.insert(workerGroup); - } - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * check worker group name exists - * @param workerGroup - * @return - */ - private boolean checkWorkerGroupNameExists(WorkerGroup workerGroup) { - - List workerGroupList = workerGroupMapper.queryWorkerGroupByName(workerGroup.getName()); - - if(workerGroupList.size() > 0 ){ - // new group has same name.. - if(workerGroup.getId() == 0){ - return true; - } - // update group... - for(WorkerGroup group : workerGroupList){ - if(group.getId() != workerGroup.getId()){ - return true; - } - } - } - return false; - } - - /** - * query worker group paging - * @param pageNo - * @param pageSize - * @param searchVal - * @return - */ - public Map queryAllGroupPaging(Integer pageNo, Integer pageSize, String searchVal) { - - Map result = new HashMap<>(5); - - Page page = new Page(pageNo, pageSize); - IPage workerGroupIPage = workerGroupMapper.queryListPaging( - page, searchVal); - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotalCount((int)workerGroupIPage.getTotal()); - pageInfo.setLists(workerGroupIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * delete worker group by id - * @param id - * @return - */ - public Map deleteWorkerGroupById(Integer id) { - - Map result = new HashMap<>(5); - - workerGroupMapper.deleteById(id); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * query all worker group - * @return - */ - public Map queryAllGroup() { - Map result = new HashMap<>(5); - List workerGroupList = workerGroupMapper.queryAllWorkerGroup(); - result.put(Constants.DATA_LIST, 
workerGroupList); - putMsg(result, Status.SUCCESS); - return result; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/utils/CheckUtils.java b/escheduler-api/src/main/java/cn/escheduler/api/utils/CheckUtils.java deleted file mode 100644 index 00c50f8263..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/utils/CheckUtils.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.utils; - - -import cn.escheduler.api.enums.Status; -import cn.escheduler.common.enums.ResUploadType; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.common.utils.PropertyUtils; -import cn.escheduler.common.utils.TaskParametersUtils; -import org.apache.commons.lang.StringUtils; - -import java.text.MessageFormat; -import java.util.HashMap; -import java.util.Map; -import java.util.regex.Pattern; - -import static cn.escheduler.common.utils.PropertyUtils.getBoolean; - - -/** - * check utils - */ -public class CheckUtils { - - - /** - * check username - * - * @param userName - */ - public static boolean checkUserName(String userName) { - return regexChecks(userName, cn.escheduler.common.Constants.REGEX_USER_NAME); - } - - /** - * check email - * - * @param email - */ - public static boolean checkEmail(String email) { - return email.length() > 5 && email.length() <= 40 && regexChecks(email, cn.escheduler.common.Constants.REGEX_MAIL_NAME) ; - } - - /** - * check project description - * - * @param desc - */ - public static Map checkDesc(String desc) { - Map result = new HashMap<>(); - if (StringUtils.isNotEmpty(desc) && desc.length() > 200) { - result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "desc length")); - }else{ - result.put(Constants.STATUS, Status.SUCCESS); - } - return result; - } - - /** - * check extra info - * - * @param otherParams - */ - public static boolean checkOtherParams(String otherParams) { - return StringUtils.isNotEmpty(otherParams) && !JSONUtils.checkJsonVaild(otherParams); - } - - /** - * check password - * - * @param password - */ - public static boolean checkPassword(String password) { - return StringUtils.isNotEmpty(password) && password.length() >= 2 && password.length() <= 20; - } - - /** - * check phone - * - * @param phone 
- */ - public static boolean checkPhone(String phone) { - return StringUtils.isNotEmpty(phone) && phone.length() > 18; - } - - - /** - * check task node parameter - * - * @param parameter - * @param taskType - * @return - */ - public static boolean checkTaskNodeParameters(String parameter, String taskType) { - AbstractParameters abstractParameters = TaskParametersUtils.getParameters(taskType, parameter); - - if (abstractParameters != null) { - return abstractParameters.checkParameters(); - } - - return false; - } - - /** - * check params - * @param userName - * @param password - * @param email - * @param phone - * @return - */ - public static Map checkUserParams(String userName, String password, String email, String phone){ - Map result = new HashMap<>(); - try{ - CheckUtils.checkUserName(userName); - CheckUtils.checkEmail(email); - CheckUtils.checkPassword(password); - CheckUtils.checkPhone(phone); - result.put(Constants.STATUS, Status.SUCCESS); - result.put(Constants.MSG, Status.SUCCESS.getMsg()); - }catch (Exception e){ - result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - result.put(Constants.MSG, e.getMessage()); - } - return result; - } - - - - - - /** - * 正则匹配 - * - * @param str - * @param pattern - * @return - */ - private static boolean regexChecks(String str, Pattern pattern) { - if (org.apache.commons.lang3.StringUtils.isEmpty(str)) { - return false; - } - - return pattern.matcher(str).matches(); - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/utils/Constants.java b/escheduler-api/src/main/java/cn/escheduler/api/utils/Constants.java deleted file mode 100644 index 1dfe3ac470..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/utils/Constants.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.utils; - -/** - * web application constants - */ -public class Constants { - - /** - * status - */ - public static final String STATUS = "status"; - - /** - * message - */ - public static final String MSG = "msg"; - - /** - * data total - * 数据总数 - */ - public static final String COUNT = "count"; - - /** - * page size - * 每页数据条数 - */ - public static final String PAGE_SIZE = "pageSize"; - - /** - * current page no - * 当前页码 - */ - public static final String PAGE_NUMBER = "pageNo"; - - /** - * result - */ - public static final String RESULT = "result"; - - /** - * - */ - public static final String DATA_LIST = "data"; - - public static final String TOTAL_LIST = "totalList"; - - public static final String CURRENT_PAGE = "currentPage"; - - public static final String TOTAL_PAGE = "totalPage"; - - public static final String TOTAL = "total"; - - /** - * session user - */ - public static final String SESSION_USER = "session.user"; - - public static final String SESSION_ID = "sessionId"; - - public static final String PASSWORD_DEFAULT = "******"; - - /** - * driver - */ - public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver"; - public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver"; - public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver"; - public static final String 
COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver"; - public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver"; - public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver"; - - /** - * database type - */ - public static final String MYSQL = "MYSQL"; - public static final String POSTGRESQL = "POSTGRESQL"; - public static final String HIVE = "HIVE"; - public static final String SPARK = "SPARK"; - public static final String CLICKHOUSE = "CLICKHOUSE"; - public static final String ORACLE = "ORACLE"; - public static final String SQLSERVER = "SQLSERVER"; - - /** - * jdbc url - */ - public static final String JDBC_MYSQL = "jdbc:mysql://"; - public static final String JDBC_POSTGRESQL = "jdbc:postgresql://"; - public static final String JDBC_HIVE_2 = "jdbc:hive2://"; - public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://"; - public static final String JDBC_ORACLE = "jdbc:oracle:thin:@//"; - public static final String JDBC_SQLSERVER = "jdbc:sqlserver://"; - - - public static final String ADDRESS = "address"; - public static final String DATABASE = "database"; - public static final String JDBC_URL = "jdbcUrl"; - public static final String PRINCIPAL = "principal"; - public static final String USER = "user"; - public static final String PASSWORD = "password"; - public static final String OTHER = "other"; - - - /** - * session timeout - */ - public static final int SESSION_TIME_OUT = 7200; - public static final int maxFileSize = 1024 * 1024 * 1024; - public static final String UDF = "UDF"; - public static final String CLASS = "class"; - public static final String RECEIVERS = "receivers"; - public static final String RECEIVERS_CC = "receiversCc"; -} \ No newline at end of file diff --git a/escheduler-api/src/main/java/cn/escheduler/api/utils/FileUtils.java b/escheduler-api/src/main/java/cn/escheduler/api/utils/FileUtils.java deleted file mode 100644 index af31d4a828..0000000000 
--- a/escheduler-api/src/main/java/cn/escheduler/api/utils/FileUtils.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.utils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.core.io.Resource; -import org.springframework.core.io.UrlResource; -import org.springframework.web.multipart.MultipartFile; - -import java.io.File; -import java.io.IOException; -import java.net.MalformedURLException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; - -/** - * file utils - */ -public class FileUtils { - private static final Logger logger = LoggerFactory.getLogger(FileUtils.class); - - /** - * copy source file to target file - * - * @param file - * @param destFilename - */ - - public static void copyFile(MultipartFile file, String destFilename) { - try { - - File destFile = new File(destFilename); - File destParentDir = new File(destFile.getParent()); - - if (!destParentDir.exists()) { - org.apache.commons.io.FileUtils.forceMkdir(destParentDir); - } - - Files.copy(file.getInputStream(), Paths.get(destFilename)); - } catch (IOException e) { - logger.error(String.format("failed to copy file , {} 
is empty file", file.getOriginalFilename()), e); - } - } - - /** - * file to resource - * - * @param filename - * @return - */ - public static Resource file2Resource(String filename) throws MalformedURLException { - Path file = Paths.get(filename); - - Resource resource = new UrlResource(file.toUri()); - if (resource.exists() || resource.isReadable()) { - return resource; - } else { - logger.error("file can not read : {}", filename); - - } - return null; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/utils/PageInfo.java b/escheduler-api/src/main/java/cn/escheduler/api/utils/PageInfo.java deleted file mode 100644 index 9d581e8d71..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/utils/PageInfo.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.utils; - -import java.util.List; - -/** - * page info - * - * @param - */ -public class PageInfo { - - /** - * list - */ - private List lists; - /** - * total count - */ - private Integer totalCount = 0; - /** - * page size - */ - private Integer pageSize = 20; - /** - * current page - */ - private Integer currentPage = 0; - /** - * pageNo - */ - private Integer pageNo; - - public PageInfo(Integer currentPage,Integer pageSize){ - if(currentPage==null){ - currentPage=1; - } - this.pageNo=(currentPage-1)*pageSize; - this.pageSize=pageSize; - this.currentPage=currentPage; - } - - public Integer getStart() { - return pageNo; - } - - public void setStart(Integer start) { - this.pageNo = start; - } - - public Integer getTotalPage() { - if (pageSize==null||pageSize == 0) { - pageSize = 7; - } - if (this.totalCount % this.pageSize == 0) { - return (this.totalCount / this.pageSize)==0?1:(this.totalCount / this.pageSize); - } - return (this.totalCount / this.pageSize + 1); - } - - public List getLists() { - return lists; - } - - public void setLists(List lists) { - this.lists = lists; - } - - public Integer getTotalCount() { - if (totalCount==null) { - totalCount = 0; - } - return totalCount; - } - - public void setTotalCount(Integer totalCount) { - this.totalCount = totalCount; - } - - public Integer getPageSize() { - if (pageSize==null||pageSize == 0) { - pageSize = 7; - } - return pageSize; - } - - public void setPageSize(Integer pageSize) { - this.pageSize = pageSize; - } - - public void setCurrentPage(Integer currentPage) { - this.currentPage = currentPage; - } - - public Integer getCurrentPage() { - if (currentPage==null||currentPage <= 0) { - this.currentPage = 1; - } - return this.currentPage; - } - -} \ No newline at end of file diff --git a/escheduler-api/src/main/java/cn/escheduler/api/utils/Result.java b/escheduler-api/src/main/java/cn/escheduler/api/utils/Result.java deleted file mode 100644 index 7c177827d4..0000000000 --- 
a/escheduler-api/src/main/java/cn/escheduler/api/utils/Result.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.utils; - -/** - * result - * - * @param - */ -public class Result { - /** - * status - * 状态码 - */ - private Integer code; - - /** - * message - * 消息 - */ - private String msg; - - /** - * data - */ - private T data; - - public Result(){} - - public Result(Integer code , String msg){ - this.code = code; - this.msg = msg; - } - - public Integer getCode() { - return code; - } - - public void setCode(Integer code) { - this.code = code; - } - - public String getMsg() { - return msg; - } - - public void setMsg(String msg) { - this.msg = msg; - } - - public T getData() { - return data; - } - - public void setData(T data) { - this.data = data; - } - - - @Override - public String toString() { - return "Status{" + - "code='" + code + '\'' + - ", msg='" + msg + '\'' + - ", data=" + data + - '}'; - } -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/utils/ZooKeeperState.java b/escheduler-api/src/main/java/cn/escheduler/api/utils/ZooKeeperState.java deleted file mode 100644 index 7d29e8618f..0000000000 --- 
a/escheduler-api/src/main/java/cn/escheduler/api/utils/ZooKeeperState.java +++ /dev/null @@ -1,211 +0,0 @@ -package cn.escheduler.api.utils; - -import org.apache.commons.lang3.StringUtils; -import org.apache.zookeeper.client.FourLetterWordMain; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.Scanner; - -/** - * zookeeper状态监控:4字口诀 - * - */ -public class ZooKeeperState { - - private static final Logger logger = LoggerFactory.getLogger(ZooKeeperState.class); - - private final String host; - private final int port; - - private int minLatency = -1, avgLatency = -1, maxLatency = -1; - private long received = -1; - private long sent = -1; - private int outStanding = -1; - private long zxid = -1; - private String mode = null; - private int nodeCount = -1; - private int watches = -1; - private int connections = -1; - - public ZooKeeperState(String connectionString) { - String host = connectionString.substring(0, - connectionString.indexOf(':')); - int port = Integer.parseInt(connectionString.substring(connectionString - .indexOf(':') + 1)); - this.host = host; - this.port = port; - } - - public void getZookeeperInfo() { - String content = cmd("srvr"); - if (StringUtils.isNotBlank(content)) { - Scanner scannerForStat = new Scanner(content); - while (scannerForStat.hasNext()) { - String line = scannerForStat.nextLine(); - if (line.startsWith("Latency min/avg/max:")) { - String[] latencys = getStringValueFromLine(line).split("/"); - minLatency = Integer.parseInt(latencys[0]); - avgLatency = Integer.parseInt(latencys[1]); - maxLatency = Integer.parseInt(latencys[2]); - } else if (line.startsWith("Received:")) { - received = Long.parseLong(getStringValueFromLine(line)); - } else if (line.startsWith("Sent:")) { - sent = Long.parseLong(getStringValueFromLine(line)); - } else if (line.startsWith("Outstanding:")) { - outStanding = Integer.parseInt(getStringValueFromLine(line)); - } else if (line.startsWith("Zxid:")) { - 
zxid = Long.parseLong(getStringValueFromLine(line).substring(2), 16); - } else if (line.startsWith("Mode:")) { - mode = getStringValueFromLine(line); - } else if (line.startsWith("Node count:")) { - nodeCount = Integer.parseInt(getStringValueFromLine(line)); - } - } - scannerForStat.close(); - } - - String wchsText = cmd("wchs"); - if (StringUtils.isNotBlank(wchsText)) { - Scanner scannerForWchs = new Scanner(wchsText); - while (scannerForWchs.hasNext()) { - String line = scannerForWchs.nextLine(); - if (line.startsWith("Total watches:")) { - watches = Integer.parseInt(getStringValueFromLine(line)); - } - } - scannerForWchs.close(); - } - - String consText = cmd("cons"); - if (StringUtils.isNotBlank(consText)) { - Scanner scannerForCons = new Scanner(consText); - if (StringUtils.isNotBlank(consText)) { - connections = 0; - } - while (scannerForCons.hasNext()) { - @SuppressWarnings("unused") - String line = scannerForCons.nextLine(); - ++connections; - } - scannerForCons.close(); - } - } - - - public boolean ruok() { - return "imok\n".equals(cmd("ruok")); - } - - - private String getStringValueFromLine(String line) { - return line.substring(line.indexOf(":") + 1, line.length()).replaceAll( - " ", "").trim(); - } - - private class SendThread extends Thread { - private String cmd; - - public String ret = ""; - - public SendThread(String cmd) { - this.cmd = cmd; - } - - @Override - public void run() { - try { - ret = FourLetterWordMain.send4LetterWord(host, port, cmd); - } catch (IOException e) { - logger.error(e.getMessage(),e); - return; - } - } - - } - - private String cmd(String cmd) { - final int waitTimeout = 5; - SendThread sendThread = new SendThread(cmd); - sendThread.setName("FourLetterCmd:" + cmd); - sendThread.start(); - try { - sendThread.join(waitTimeout * 1000); - return sendThread.ret; - } catch (InterruptedException e) { - logger.error("send " + cmd + " to server " + host + ":" + port + " failed!", e); - } - return ""; - } - - public Logger getLogger() 
{ - return logger; - } - - public String getHost() { - return host; - } - - public int getPort() { - return port; - } - - public int getMinLatency() { - return minLatency; - } - - public int getAvgLatency() { - return avgLatency; - } - - public int getMaxLatency() { - return maxLatency; - } - - public long getReceived() { - return received; - } - - public long getSent() { - return sent; - } - - public int getOutStanding() { - return outStanding; - } - - public long getZxid() { - return zxid; - } - - public String getMode() { - return mode; - } - - public int getNodeCount() { - return nodeCount; - } - - public int getWatches() { - return watches; - } - - public int getConnections() { - return connections; - } - - @Override - public String toString() { - return "ZooKeeperState [host=" + host + ", port=" + port - + ", minLatency=" + minLatency + ", avgLatency=" + avgLatency - + ", maxLatency=" + maxLatency + ", received=" + received - + ", sent=" + sent + ", outStanding=" + outStanding + ", zxid=" - + zxid + ", mode=" + mode + ", nodeCount=" + nodeCount - + ", watches=" + watches + ", connections=" - + connections + "]"; - } - - - -} diff --git a/escheduler-api/src/main/java/cn/escheduler/api/utils/ZookeeperMonitor.java b/escheduler-api/src/main/java/cn/escheduler/api/utils/ZookeeperMonitor.java deleted file mode 100644 index 474a56f486..0000000000 --- a/escheduler-api/src/main/java/cn/escheduler/api/utils/ZookeeperMonitor.java +++ /dev/null @@ -1,89 +0,0 @@ -package cn.escheduler.api.utils; - -import cn.escheduler.common.enums.ZKNodeType; -import cn.escheduler.common.zk.AbstractZKClient; -import cn.escheduler.common.model.MasterServer; -import cn.escheduler.dao.entity.ZookeeperRecord; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - - -/** - * monitor zookeeper info - */ -public class ZookeeperMonitor extends AbstractZKClient{ - - 
private static final Logger LOG = LoggerFactory.getLogger(ZookeeperMonitor.class); - private static final String zookeeperList = AbstractZKClient.getZookeeperQuorum(); - - /** - * - * @return zookeeper info list - */ - public static List zookeeperInfoList(){ - String zookeeperServers = zookeeperList.replaceAll("[\\t\\n\\x0B\\f\\r]", ""); - try{ - return zookeeperInfoList(zookeeperServers); - }catch(Exception e){ - LOG.error(e.getMessage(),e); - } - return null; - } - - /** - * get master servers - * @return - */ - public List getMasterServers(){ - return getServersList(ZKNodeType.MASTER); - } - - /** - * master construct is the same with worker, use the master instead - * @return - */ - public List getWorkerServers(){ - return getServersList(ZKNodeType.WORKER); - } - - private static List zookeeperInfoList(String zookeeperServers) { - - List list = new ArrayList<>(5); - - if(StringUtils.isNotBlank(zookeeperServers)){ - String[] zookeeperServersArray = zookeeperServers.split(","); - - for (String zookeeperServer : zookeeperServersArray) { - ZooKeeperState state = new ZooKeeperState(zookeeperServer); - boolean ok = state.ruok(); - if(ok){ - state.getZookeeperInfo(); - } - - String hostName = zookeeperServer; - int connections = state.getConnections(); - int watches = state.getWatches(); - long sent = state.getSent(); - long received = state.getReceived(); - String mode = state.getMode(); - int minLatency = state.getMinLatency(); - int avgLatency = state.getAvgLatency(); - int maxLatency = state.getMaxLatency(); - int nodeCount = state.getNodeCount(); - int status = ok ? 
1 : 0; - Date date = new Date(); - - ZookeeperRecord zookeeperRecord = new ZookeeperRecord(hostName,connections,watches,sent,received,mode,minLatency,avgLatency,maxLatency,nodeCount,status,date); - list.add(zookeeperRecord); - - } - } - - return list; - } -} diff --git a/escheduler-api/src/main/resources/apiserver_logback.xml b/escheduler-api/src/main/resources/apiserver_logback.xml deleted file mode 100644 index 43e6af951a..0000000000 --- a/escheduler-api/src/main/resources/apiserver_logback.xml +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - INFO - - ${log.base}/escheduler-api-server.log - - ${log.base}/escheduler-api-server.%d{yyyy-MM-dd_HH}.%i.log - 168 - 64MB - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - - \ No newline at end of file diff --git a/escheduler-api/src/main/resources/application.properties b/escheduler-api/src/main/resources/application.properties deleted file mode 100644 index b817c18a4a..0000000000 --- a/escheduler-api/src/main/resources/application.properties +++ /dev/null @@ -1,19 +0,0 @@ -# server port -server.port=12345 - -# session config -server.servlet.session.timeout=7200 - -server.servlet.context-path=/escheduler/ - -# file size limit for upload -spring.servlet.multipart.max-file-size=1024MB -spring.servlet.multipart.max-request-size=1024MB - -#post content -server.jetty.max-http-post-size=5000000 - -spring.messages.encoding=UTF-8 - -#i18n classpath folder , file prefix messages, if have many files, use "," seperator -spring.messages.basename=i18n/messages diff --git a/escheduler-api/src/main/resources/combined_logback.xml b/escheduler-api/src/main/resources/combined_logback.xml deleted file mode 100644 index fa1a95eeb2..0000000000 --- a/escheduler-api/src/main/resources/combined_logback.xml +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - - %highlight([%level]) %date{yyyy-MM-dd 
HH:mm:ss.SSS} %logger{10}:[%line] - %msg%n - - UTF-8 - - - - - INFO - - - - taskAppId - ${log.base} - - - - ${log.base}/${taskAppId}.log - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - true - - - - - - ${log.base}/escheduler-combined.log - - INFO - - - - ${log.base}/escheduler-combined.%d{yyyy-MM-dd_HH}.%i.log - 168 - 200MB - -       - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - -    - - - - - - - - - \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/HttpClientTest.java b/escheduler-api/src/test/java/cn/escheduler/api/HttpClientTest.java deleted file mode 100644 index 1d527b563c..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/HttpClientTest.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api; - -import org.apache.http.NameValuePair; -import org.apache.http.client.entity.UrlEncodedFormEntity; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.utils.URIBuilder; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.message.BasicNameValuePair; -import org.apache.http.util.EntityUtils; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.net.URI; -import java.util.ArrayList; -import java.util.List; - -public class HttpClientTest { - - private static final Logger logger = LoggerFactory.getLogger(HttpClientTest.class); - - @Test - public void doPOSTParam()throws Exception{ - // create HttpClient - CloseableHttpClient httpclient = HttpClients.createDefault(); - - // create http post request - HttpPost httpPost = new HttpPost("http://localhost:12345/escheduler/projects/create"); - httpPost.setHeader("token", "123"); - // set parameters - List parameters = new ArrayList(); - parameters.add(new BasicNameValuePair("projectName", "qzw")); - parameters.add(new BasicNameValuePair("desc", "qzw")); - - UrlEncodedFormEntity formEntity = new UrlEncodedFormEntity(parameters); - httpPost.setEntity(formEntity); - - - CloseableHttpResponse response = null; - try { - // execute - response = httpclient.execute(httpPost); - // response status code 200 - if (response.getStatusLine().getStatusCode() == 200) { - String content = EntityUtils.toString(response.getEntity(), "UTF-8"); - logger.info(content); - } - } finally { - if (response != null) { - response.close(); - } - httpclient.close(); - } - } - - /** - * do get param path variables chinese - * @throws Exception - */ - @Test - public void doGETParamPathVariableAndChinese()throws Exception{ - // create HttpClient - CloseableHttpClient 
httpclient = HttpClients.createDefault(); - - List parameters = new ArrayList(); - // parameters.add(new BasicNameValuePair("pageSize", "10")); - - // define the parameters of the request - URI uri = new URIBuilder("http://localhost:12345/escheduler/projects/%E5%85%A8%E9%83%A8%E6%B5%81%E7%A8%8B%E6%B5%8B%E8%AF%95/process/list") - .build(); - - // create http GET request - HttpGet httpGet = new HttpGet(uri); - httpGet.setHeader("token","10f5625a2a1cbf9aa710653796c5d764"); - //response object - CloseableHttpResponse response = null; - try { - // execute http get request - response = httpclient.execute(httpGet); - // response status code 200 - if (response.getStatusLine().getStatusCode() == 200) { - String content = EntityUtils.toString(response.getEntity(), "UTF-8"); - logger.info("start--------------->"); - logger.info(content); - logger.info("end----------------->"); - } - } finally { - if (response != null) { - response.close(); - } - httpclient.close(); - } - } - - /** - * - * do get param - * @throws Exception - */ - @Test - public void doGETParam()throws Exception{ - // create HttpClient - CloseableHttpClient httpclient = HttpClients.createDefault(); - - List parameters = new ArrayList(); - parameters.add(new BasicNameValuePair("startDate", "2018-04-22 19:30:08")); - parameters.add(new BasicNameValuePair("endDate", "2028-04-22 19:30:08")); - parameters.add(new BasicNameValuePair("projectId", "0")); - - // define the parameters of the request - URI uri = new URIBuilder("http://localhost:12345/escheduler/projects/analysis/queue-count") - .setParameters(parameters) - .build(); - - // create http GET request - HttpGet httpGet = new HttpGet(uri); - httpGet.setHeader("token","2aef24c052c212fab9eec78848c2258b"); - //response object - CloseableHttpResponse response = null; - try { - // execute http get request - response = httpclient.execute(httpGet); - // response status code 200 - if (response.getStatusLine().getStatusCode() == 200) { - String content = 
EntityUtils.toString(response.getEntity(), "UTF-8"); - logger.info("start--------------->"); - logger.info(content); - logger.info("end----------------->"); - } - } finally { - if (response != null) { - response.close(); - } - httpclient.close(); - } - } - -} diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/AbstractControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/AbstractControllerTest.java deleted file mode 100644 index c56b2c1d04..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/AbstractControllerTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.service.SessionService; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import org.apache.commons.lang3.StringUtils; -import org.junit.*; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; -import org.springframework.test.web.servlet.MockMvc; -import org.springframework.test.web.servlet.setup.MockMvcBuilders; -import org.springframework.web.context.WebApplicationContext; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; - -@Ignore -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class AbstractControllerTest { - private static Logger logger = LoggerFactory.getLogger(AbstractControllerTest.class); - public static final String SESSION_ID = "sessionId"; - - protected MockMvc mockMvc; - - @Autowired - private WebApplicationContext webApplicationContext; - - @Autowired - private SessionService sessionService; - - protected User user; - protected String sessionId; - - @Before - public void setUp() { - mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); - createSession(); - } - - - @After - public void after(){ - sessionService.signOut("127.0.0.1", user); - } - - - private void createSession(){ - - User loginUser = new User(); - loginUser.setId(1); - loginUser.setUserType(UserType.GENERAL_USER); - - user = loginUser; - - String session = sessionService.createSession(loginUser, "127.0.0.1"); - sessionId = session; - - Assert.assertTrue(StringUtils.isNotEmpty(session)); - - } -} \ No newline at end of 
file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/DataAnalysisControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/DataAnalysisControllerTest.java deleted file mode 100644 index 2722b68f58..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/DataAnalysisControllerTest.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.http.MediaType; -import org.springframework.test.context.junit4.SpringRunner; -import org.springframework.test.web.servlet.MockMvc; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.test.web.servlet.setup.MockMvcBuilders; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; -import org.springframework.web.context.WebApplicationContext; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -@Ignore -@RunWith(SpringRunner.class) -@SpringBootTest -public class DataAnalysisControllerTest { - private static Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class); - - private MockMvc mockMvc; - - @Autowired - private WebApplicationContext webApplicationContext; - - @Before - public void setUp() { - mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); - } - - @Test - public void countTaskState() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("startDate","2019-02-01 00:00:00"); - paramsMap.add("endDate","2019-02-28 00:00:00"); - paramsMap.add("projectId","21"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/analysis/task-state-count") - .header("sessionId", 
"08fae8bf-fe2d-4fc0-8129-23c37fbfac82") - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void countProcessInstanceState() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("startDate","2019-02-01 00:00:00"); - paramsMap.add("endDate","2019-02-28 00:00:00"); - paramsMap.add("projectId","21"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/analysis/process-state-count") - .header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82") - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/DataSourceControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/DataSourceControllerTest.java deleted file mode 100644 index d78ba5c311..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/DataSourceControllerTest.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - - -/** - * data source controller test - */ -public class DataSourceControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(DataSourceControllerTest.class); - - - @Test - public void queryDataSource() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/datasources/list").header("sessionId", sessionId).param("type","HIVE")) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - Result result = 
JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Ignore - @Test - public void connectDataSource() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("name","hive data source"); - paramsMap.add("type","HIVE"); - paramsMap.add("host","192.168.xx.xx"); - paramsMap.add("port","10000"); - paramsMap.add("database","default"); - paramsMap.add("userName","hive"); - paramsMap.add("password",""); - paramsMap.add("other",""); - MvcResult mvcResult = mockMvc.perform(post("/datasources/connect") - .header("sessionId", sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/ExecutorControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/ExecutorControllerTest.java deleted file mode 100644 index e89653fc95..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/ExecutorControllerTest.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * executor controller test - */ -@Ignore -public class ExecutorControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(ExecutorControllerTest.class); - - - @Test - public void startCheckProcessDefinition() throws Exception { - - MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/executors/start-check","project_test1") - .header(SESSION_ID, sessionId) - .param("processDefinitionId","226")) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - 
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void getReceiverCc() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - //paramsMap.add("processDefinitionId","4"); - paramsMap.add("processInstanceId","13"); - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/executors/get-receiver-cc","li_sql_test") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/LoggerControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/LoggerControllerTest.java deleted file mode 100644 index f4c5bd8ed8..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/LoggerControllerTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; - -/** - * logger controller test - */ -public class LoggerControllerTest extends AbstractControllerTest { - - private static Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class); - - @Test - public void queryLog() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("taskInstId","-1"); - paramsMap.add("skipLineNum","0"); - paramsMap.add("limit","1000"); - - MvcResult mvcResult = mockMvc.perform(get("/log/detail") - .header("sessionId", sessionId) - .params(paramsMap)) -// .andExpect(status().isOk()) -// .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/LoginControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/LoginControllerTest.java deleted file mode 100644 index 97354fea60..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/LoginControllerTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) 
under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MockMvc; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.test.web.servlet.setup.MockMvcBuilders; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; -import org.springframework.web.context.WebApplicationContext; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * login controller test - */ -public class LoginControllerTest extends AbstractControllerTest{ - private static Logger 
logger = LoggerFactory.getLogger(SchedulerControllerTest.class); - - - - @Test - public void login() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("userName","admin"); - paramsMap.add("userPassword","escheduler123"); - - MvcResult mvcResult = mockMvc.perform(post("/login") - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/MonitorControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/MonitorControllerTest.java deleted file mode 100644 index e4c6103cd0..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/MonitorControllerTest.java +++ /dev/null @@ -1,82 +0,0 @@ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import com.alibaba.fastjson.JSONObject; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * monitor controller test - */ -public class MonitorControllerTest extends AbstractControllerTest { - - private static final Logger logger = LoggerFactory.getLogger(MonitorControllerTest.class); - - - @Test - 
public void listMaster() throws Exception { - - MvcResult mvcResult = mockMvc.perform(get("/monitor/master/list") - .header(SESSION_ID, sessionId) - /* .param("type", ResourceType.FILE.name())*/ ) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - result.getCode().equals(Status.SUCCESS.getCode()); - - - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); - - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - - @Test - public void queryDatabaseState() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/monitor/database") - .header(SESSION_ID, sessionId) - /* .param("type", ResourceType.FILE.name())*/ ) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - result.getCode().equals(Status.SUCCESS.getCode()); - - - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - - @Test - public void queryZookeeperState() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/monitor/zookeeper/list") - .header(SESSION_ID, sessionId) - /* .param("type", ResourceType.FILE.name())*/ ) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - result.getCode().equals(Status.SUCCESS.getCode()); - - - - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No 
newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/ProcessDefinitionControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/ProcessDefinitionControllerTest.java deleted file mode 100644 index bbd61afbec..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/ProcessDefinitionControllerTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * process definition controller test - */ -public class ProcessDefinitionControllerTest extends AbstractControllerTest{ - - private static Logger logger = LoggerFactory.getLogger(ProcessDefinitionControllerTest.class); - - @Test - public void createProcessDefinition() throws Exception { - String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; - String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"; - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("name","dag_test"); - paramsMap.add("processDefinitionJson",json); - paramsMap.add("locations", locations); - paramsMap.add("connects", "[]"); - paramsMap.add("desc", "desc test"); - - MvcResult mvcResult = 
mockMvc.perform(post("/projects/{projectName}/process/save","project_test1") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/ProcessInstanceControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/ProcessInstanceControllerTest.java deleted file mode 100644 index b3a2ea8c4a..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/ProcessInstanceControllerTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * process instance controller test - */ -public class ProcessInstanceControllerTest extends AbstractControllerTest { - private static Logger logger = LoggerFactory.getLogger(ProcessInstanceControllerTest.class); - - - @Test - public void queryTaskListByProcessId() throws Exception { - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/task-list-by-process-id","project_test1") - .header(SESSION_ID, sessionId) - .param("processInstanceId","-1")) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/ProjectControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/ProjectControllerTest.java deleted file mode 100644 index 7deda2fafa..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/ProjectControllerTest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * project controller - */ -public class ProjectControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(ProcessInstanceControllerTest.class); - - - @Test - public void createProject() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("projectName","project_test1"); - paramsMap.add("desc","the test project"); - - MvcResult mvcResult = mockMvc.perform(post("/projects/create") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - 
.andExpect(status().isCreated()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/QueueControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/QueueControllerTest.java deleted file mode 100644 index 28e01ee51a..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/QueueControllerTest.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * queue controller test - */ -public class QueueControllerTest extends AbstractControllerTest{ - - private static Logger logger = LoggerFactory.getLogger(QueueControllerTest.class); - - @Test - public void queryList() throws Exception { - - MvcResult mvcResult = mockMvc.perform(get("/queue/list") - .header(SESSION_ID, sessionId)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void queryPagingList() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - //paramsMap.add("processInstanceId","1380"); - paramsMap.add("searchVal",""); - paramsMap.add("pageNo","1"); - paramsMap.add("pageSize","20"); - - MvcResult mvcResult = mockMvc.perform(get("/queue/list-paging") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - 
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void createQueue() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("queue","ait"); - paramsMap.add("queueName","ait"); - - MvcResult mvcResult = mockMvc.perform(post("/queue/create") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); -// Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - - - } - - @Test - public void updateQueue() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("id","2"); - paramsMap.add("queue","ait12"); - paramsMap.add("queueName","aitName"); - - MvcResult mvcResult = mockMvc.perform(post("/queue/update") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - //Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void verifyQueue() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("queue","ait123"); - paramsMap.add("queueName","aitName"); - - MvcResult mvcResult = mockMvc.perform(post("/queue/verify-queue") - .header(SESSION_ID, sessionId) - 
.params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - //Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/ResourcesControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/ResourcesControllerTest.java deleted file mode 100644 index b1daf8132f..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/ResourcesControllerTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.ResourceType; -import cn.escheduler.common.utils.JSONUtils; -import com.alibaba.fastjson.JSONObject; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * resources controller test - */ -public class ResourcesControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(ResourcesControllerTest.class); - - @Test - public void querytResourceList() throws Exception { - - MvcResult mvcResult = mockMvc.perform(get("/resources/list") - .header(SESSION_ID, sessionId) - .param("type", ResourceType.FILE.name())) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); - - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void verifyResourceName() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("name","list_resources_1.sh"); - paramsMap.add("type","FILE"); - - MvcResult mvcResult = 
mockMvc.perform(get("/resources/verify-name") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - - Assert.assertEquals(Status.TENANT_NOT_EXIST.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/SchedulerControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/SchedulerControllerTest.java deleted file mode 100644 index 71f63acdb5..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/SchedulerControllerTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * scheduler controller test - */ -public class SchedulerControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); - - @Test - public void queryScheduleList() throws Exception { - MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/list","project_test1") - .header(SESSION_ID, sessionId)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - - @Test - public void previewSchedule() throws Exception { - MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/preview","li_test_1") - .header(SESSION_ID, sessionId) - .param("schedule","{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? 
*'}")) - .andExpect(status().isCreated()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/TaskInstanceControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/TaskInstanceControllerTest.java deleted file mode 100644 index 96db23d061..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/TaskInstanceControllerTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * task instance controller test - */ -public class TaskInstanceControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); - - @Test - public void queryTaskListPaging() throws Exception { - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - //paramsMap.add("processInstanceId","1380"); - paramsMap.add("searchVal",""); - paramsMap.add("taskName",""); - //paramsMap.add("stateType",""); - paramsMap.add("startDate","2019-02-26 19:48:00"); - paramsMap.add("endDate","2019-02-26 19:48:22"); - paramsMap.add("pageNo","1"); - paramsMap.add("pageSize","20"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/task-instance/list-paging","project_test1") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git 
a/escheduler-api/src/test/java/cn/escheduler/api/controller/TenantControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/TenantControllerTest.java deleted file mode 100644 index b4cb7db901..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/TenantControllerTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * tenant controller test - */ -public class TenantControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class); - - - @Test - public void countTaskState() throws Exception { - - MvcResult mvcResult = mockMvc.perform(get("/tenant/list") - .header(SESSION_ID, sessionId)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/controller/UsersControllerTest.java b/escheduler-api/src/test/java/cn/escheduler/api/controller/UsersControllerTest.java deleted file mode 100644 index 8425ccbd43..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/controller/UsersControllerTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.controller; - -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - -/** - * users controller test - */ -public class UsersControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(QueueControllerTest.class); - - - @Test - public void queryList() throws Exception { - - MvcResult mvcResult = mockMvc.perform(get("/users/list") - .header(SESSION_ID, sessionId)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } -} \ No newline at end of file diff --git 
a/escheduler-api/src/test/java/cn/escheduler/api/service/DataAnalysisServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/DataAnalysisServiceTest.java deleted file mode 100644 index 253cae0bba..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/DataAnalysisServiceTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class DataAnalysisServiceTest { - private static final Logger logger = LoggerFactory.getLogger(DataAnalysisServiceTest.class); - - @Autowired - private DataAnalysisService dataAnalysisService; - - @Test - public void countDefinitionByUser(){ - User loginUser = new User(); - loginUser.setId(27); - loginUser.setUserType(UserType.GENERAL_USER); - Map map = dataAnalysisService.countDefinitionByUser(loginUser, 21); - Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); - } - -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/DataSourceServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/DataSourceServiceTest.java deleted file mode 100644 index ef5305da38..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/DataSourceServiceTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.enums.DbType; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class DataSourceServiceTest { - private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceTest.class); - - @Autowired - private DataSourceService dataSourceService; - - @Test - public void queryDataSourceList(){ - - User loginUser = new User(); - loginUser.setId(27); - loginUser.setUserType(UserType.GENERAL_USER); - Map map = dataSourceService.queryDataSourceList(loginUser, DbType.MYSQL.ordinal()); - Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/ExecutorServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/ExecutorServiceTest.java deleted file mode 100644 index 76d7af1685..0000000000 --- 
a/escheduler-api/src/test/java/cn/escheduler/api/service/ExecutorServiceTest.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.text.MessageFormat; -import java.util.HashMap; -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class ExecutorServiceTest { - private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceTest.class); - - @Autowired - private ExecutorService executorService; - - @Ignore - @Test - public void startCheckByProcessDefinedId(){ - - Map map = executorService.startCheckByProcessDefinedId(1234); - Assert.assertNull(map); - - } - - - @Test - public void 
putMsgWithParamsTest() { - - Map map = new HashMap<>(5); - putMsgWithParams(map, Status.PROJECT_ALREADY_EXISTS); - - logger.info(map.toString()); - } - - - void putMsgWithParams(Map result, Status status,Object ... statusParams) { - result.put(Constants.STATUS, status); - if(statusParams != null && statusParams.length > 0){ - result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); - }else { - result.put(Constants.MSG, status.getMsg()); - } - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/LoggerServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/LoggerServiceTest.java deleted file mode 100644 index 22b4a5906a..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/LoggerServiceTest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Result; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class LoggerServiceTest { - private static final Logger logger = LoggerFactory.getLogger(LoggerServiceTest.class); - - @Autowired - private LoggerService loggerService; - - @Test - public void queryDataSourceList(){ - - User loginUser = new User(); - loginUser.setId(27); - loginUser.setUserType(UserType.GENERAL_USER); - - Result result = loggerService.queryLog(-1, 0, 100); - - Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue()); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessDefinitionServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessDefinitionServiceTest.java deleted file mode 100644 index 06032fa5cc..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessDefinitionServiceTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import com.alibaba.fastjson.JSON; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class ProcessDefinitionServiceTest { - private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceTest.class); - - @Autowired - ProcessDefinitionService processDefinitionService; - - @Test - public void queryProccessDefinitionList() throws Exception { - - User loginUser = new User(); - loginUser.setId(-1); - loginUser.setUserType(UserType.GENERAL_USER); - - Map map = processDefinitionService.queryProccessDefinitionList(loginUser,"project_test1"); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); - } - - @Test - public void queryProcessDefinitionListPagingTest() throws Exception { - - User loginUser = new User(); - loginUser.setId(-1); - loginUser.setUserType(UserType.GENERAL_USER); - Map map = 
processDefinitionService.queryProcessDefinitionListPaging(loginUser, "project_test1", "",1, 5,0); - - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); - } - - @Test - public void deleteProcessDefinitionByIdTest() throws Exception { - - User loginUser = new User(); - loginUser.setId(-1); - loginUser.setUserType(UserType.GENERAL_USER); - Map map = processDefinitionService.deleteProcessDefinitionById(loginUser, "li_sql_test", 6); - - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); - } - - @Test - public void batchDeleteProcessDefinitionByIds() throws Exception { - - User loginUser = new User(); - loginUser.setId(-1); - loginUser.setUserType(UserType.GENERAL_USER); - Map map = processDefinitionService.batchDeleteProcessDefinitionByIds(loginUser, "li_test_1", "2,3"); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessInstanceServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessInstanceServiceTest.java deleted file mode 100644 index db88819f7c..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessInstanceServiceTest.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.enums.DependResult; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import com.alibaba.fastjson.JSON; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.io.IOException; -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class ProcessInstanceServiceTest { - private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceServiceTest.class); - - @Autowired - ProcessInstanceService processInstanceService; - - @Test - public void viewVariables() { - try { - Map map = processInstanceService.viewVariables(-1); - Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); - }catch (Exception e){ - logger.error(e.getMessage(), e); - } - } - - @Test - public void testDependResult(){ - String logString = "[INFO] 2019-03-19 17:11:08.475 cn.escheduler.server.worker.log.TaskLogger:[172] - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 
223-ALL-day-last1Day,SUCCESS\n" + - "[INFO] 2019-03-19 17:11:08.476 cn.escheduler.server.worker.runner.TaskScheduleThread:[172] - task : 223_10739_452334 exit status code : 0\n" + - "[root@node2 current]# "; - try { - Map resultMap = - processInstanceService.parseLogForDependentResult(logString); - Assert.assertEquals(resultMap.size() , 1); - } catch (IOException e) { - - } - } - - @Test - public void queryProcessInstanceList() throws Exception { - - User loginUser = new User(); - loginUser.setId(27); - loginUser.setUserType(UserType.GENERAL_USER); - Map map = processInstanceService.queryProcessInstanceList(loginUser, "project_test1", 0, "", "", "", ExecutionStatus.FAILURE, "", 1, 5); - - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); - } - - @Test - public void batchDeleteProcessInstanceByIds() throws Exception { - - User loginUser = new User(); - loginUser.setId(-1); - loginUser.setUserType(UserType.GENERAL_USER); - Map map = processInstanceService.batchDeleteProcessInstanceByIds(loginUser, "li_test_1", "4,2,300"); - - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/ResourcesServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/ResourcesServiceTest.java deleted file mode 100644 index c857cd11a2..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/ResourcesServiceTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.enums.ResourceType; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class ResourcesServiceTest { - private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceTest.class); - - @Autowired - private ResourcesService resourcesService; - - @Test - public void querytResourceList(){ - User loginUser = new User(); - loginUser.setId(-1); - loginUser.setUserType(UserType.GENERAL_USER); - - Map map = resourcesService.queryResourceList(loginUser, ResourceType.FILE); - Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/SchedulerServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/SchedulerServiceTest.java deleted file mode 100644 index 6a24f6438f..0000000000 --- 
a/escheduler-api/src/test/java/cn/escheduler/api/service/SchedulerServiceTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.enums.ReleaseState; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.Project; -import cn.escheduler.dao.entity.User; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class SchedulerServiceTest { - private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceTest.class); - - @Autowired - private SchedulerService schedulerService; - - @Test - public void testSetScheduleState(){ - User loginUser = new User(); - 
loginUser.setId(-1); - loginUser.setUserType(UserType.GENERAL_USER); - Project project = new Project(); - project.setName("project_test1"); - project.setId(-1); - - Map map = schedulerService.setScheduleState(loginUser, project.getName(), 44, ReleaseState.ONLINE); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - } - -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/SessionServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/SessionServiceTest.java deleted file mode 100644 index 3df05d1fac..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/SessionServiceTest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import org.apache.commons.lang3.StringUtils; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class SessionServiceTest { - - private static final Logger logger = LoggerFactory.getLogger(SessionServiceTest.class); - - @Autowired - private SessionService sessionService; - - @Test - public void createSession(){ - - User loginUser = new User(); - loginUser.setId(1); - loginUser.setUserType(UserType.GENERAL_USER); - - String session = sessionService.createSession(loginUser, "127.0.0.1"); - Assert.assertTrue(StringUtils.isNotEmpty(session)); - - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/TaskInstanceServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/TaskInstanceServiceTest.java deleted file mode 100644 index 4b4d6f5d01..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/TaskInstanceServiceTest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class TaskInstanceServiceTest { - private static final Logger logger = LoggerFactory.getLogger(TaskInstanceServiceTest.class); - - @Autowired - private TaskInstanceService taskInstanceService; - - @Test - public void queryTaskListPaging(){ - - User loginUser = new User(); - loginUser.setId(-1); - loginUser.setUserType(UserType.GENERAL_USER); - - Map map = taskInstanceService.queryTaskListPaging(loginUser, "project_test1", 0, "", - "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - PageInfo pageInfo = (PageInfo) map.get("data"); - - if(pageInfo != null){ - logger.info(pageInfo.getLists().toString()); - } - - - } -} \ No newline at end of file diff --git 
a/escheduler-api/src/test/java/cn/escheduler/api/service/TenantServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/TenantServiceTest.java deleted file mode 100644 index 8326c84a58..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/TenantServiceTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class TenantServiceTest { - private static final Logger logger = LoggerFactory.getLogger(TenantServiceTest.class); - - @Autowired - private TenantService tenantService; - - @Test - public void queryTenantList(){ - - User loginUser = new User(); - loginUser.setUserType(UserType.ADMIN_USER); - Map map = tenantService.queryTenantList(loginUser); - Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); - logger.info(map.toString()); - - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/UdfFuncServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/UdfFuncServiceTest.java deleted file mode 100644 index d7953041fb..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/UdfFuncServiceTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.api.utils.PageInfo; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class UdfFuncServiceTest { - private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceTest.class); - - @Autowired - private UdfFuncService udfFuncService; - - @Test - public void queryUdfFuncListPaging(){ - - User loginUser = new User(); - loginUser.setId(-1); - loginUser.setUserType(UserType.GENERAL_USER); - - Map map = udfFuncService.queryUdfFuncListPaging(loginUser, "", 1, 10); - Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); - - PageInfo pageInfo = (PageInfo) map.get("data"); - logger.info(pageInfo.getLists().toString()); - - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/UsersServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/UsersServiceTest.java deleted file mode 100644 index acab8f707e..0000000000 
--- a/escheduler-api/src/test/java/cn/escheduler/api/service/UsersServiceTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.api.service; - -import cn.escheduler.api.ApiApplicationServer; -import cn.escheduler.api.enums.Status; -import cn.escheduler.api.utils.Constants; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.User; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Map; - -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -public class UsersServiceTest { - private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class); - - @Autowired - private UsersService usersService; - - @Test - public void getUserInfo(){ - - User loginUser = new User(); - loginUser.setId(19); - loginUser.setUserType(UserType.GENERAL_USER); - Map map = usersService.getUserInfo(loginUser); - 
Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); - logger.info(map.toString()); - - } -} \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/utils/ZookeeperMonitorUtilsTest.java b/escheduler-api/src/test/java/cn/escheduler/api/utils/ZookeeperMonitorUtilsTest.java deleted file mode 100644 index c79d6ea31d..0000000000 --- a/escheduler-api/src/test/java/cn/escheduler/api/utils/ZookeeperMonitorUtilsTest.java +++ /dev/null @@ -1,30 +0,0 @@ -package cn.escheduler.api.utils; - -import cn.escheduler.common.model.MasterServer; -import org.junit.Assert; -import org.junit.Test; -import java.util.List; - -/** - * zookeeper monitor utils test - */ -public class ZookeeperMonitorUtilsTest { - - - @Test - public void testGetMasterLsit(){ - - ZookeeperMonitor zookeeperMonitor = new ZookeeperMonitor(); - - - List masterServerList = zookeeperMonitor.getMasterServers(); - - List workerServerList = zookeeperMonitor.getWorkerServers(); - - Assert.assertTrue(masterServerList.size() >= 0); - Assert.assertTrue(workerServerList.size() >= 0); - - - } - -} \ No newline at end of file diff --git a/escheduler-common/pom.xml b/escheduler-common/pom.xml deleted file mode 100644 index 712320fba6..0000000000 --- a/escheduler-common/pom.xml +++ /dev/null @@ -1,595 +0,0 @@ - - - 4.0.0 - - escheduler - cn.analysys - 1.1.0-SNAPSHOT - - escheduler-common - escheduler-common - http://maven.apache.org - jar - - UTF-8 - - - - com.alibaba - fastjson - compile - - - - org.apache.httpcomponents - httpclient - - - - junit - junit - test - - - - commons-configuration - commons-configuration - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-databind - - - org.apache.curator - curator-client - 2.12.0 - - - log4j - log4j - - - io.netty - netty - - - - - org.apache.commons - commons-collections4 - - - - org.apache.hadoop - hadoop-common - - - org.slf4j - slf4j-log4j12 - - - jdk.tools - jdk.tools - - - servlet-api - 
javax.servlet - - - javax.servlet - servlet-api - - - log4j - log4j - - - org.apache.curator - curator-client - - - - commons-configuration - commons-configuration - - - io.grpc - grpc-protobuf - - - io.netty - netty - - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-mapper-asl - - - com.google.protobuf - jackson-mapper-asl - - - com.google.code.gson - gson - - - org.apache.commons - commons-math3 - - - xmlenc - xmlenc - - - commons-net - commons-net - - - org.apache.avro - avro - - - org.apache.zookeeper - zookeeper - - - jsr305 - com.google.code.findbugs - - - javax.servlet.jsp - jsp-api - - - jersey-json - com.sun.jersey - - - jersey-server - com.sun.jersey - - - jersey-core - com.sun.jersey - - - xz - org.tukaani - - - - - - - - - - org.apache.hadoop - hadoop-client - - - org.slf4j - slf4j-log4j12 - - - servlet-api - javax.servlet - - - org.codehaus.jackson - jackson-jaxrs - - - org.codehaus.jackson - jackson-xc - - - - - - - org.fusesource.leveldbjni - leveldbjni-all - - - org.apache.zookeeper - zookeeper - - - org.apache.hadoop - hadoop-mapreduce-client-shuffle - - - jersey-client - com.sun.jersey - - - jersey-core - com.sun.jersey - - - jaxb-api - javax.xml.bind - - - log4j - log4j - - - - - - javax.servlet - javax.servlet-api - - - - org.apache.hadoop - hadoop-hdfs - - - javax.servlet - servlet-api - - - io.netty - netty - - - com.google.protobuf - protobuf-java - - - xmlenc - xmlenc - - - io.netty - netty-all - - - org.fusesource.leveldbjni - leveldbjni-all - - - jersey-core - com.sun.jersey - - - jersey-server - com.sun.jersey - - - log4j - log4j - - - - - - org.apache.hadoop - hadoop-aws - - - org.apache.hadoop - hadoop-common - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-annotations - - - - - - org.apache.commons - commons-lang3 - - - - org.postgresql - postgresql - - - - org.apache.hive - hive-jdbc - - - slf4j-log4j12 - 
org.slf4j - - - org.eclipse.jetty.aggregate - jetty-all - - - - org.apache.ant - ant - - - io.dropwizard.metrics - metrics-json - - - io.dropwizard.metrics - metrics-jvm - - - com.github.joshelser - dropwizard-metrics-hadoop-metrics2-reporter - - - - io.netty - netty-all - - - com.google.code.gson - gson - - - com.google.code.findbugs - jsr305 - - - io.dropwizard.metrics - metrics-core - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.commons - commons-compress - - - org.apache.curator - curator-client - - - org.apache.hadoop - hadoop-auth - - - org.apache.hadoop - hadoop-mapreduce-client-core - - - org.apache.hadoop - hadoop-yarn-api - - - - org.apache.zookeeper - zookeeper - - - org.codehaus.jackson - jackson-jaxrs - - - org.codehaus.jackson - jackson-xc - - - com.google.protobuf - protobuf-java - - - - org.json - json - - - log4j-slf4j-impl - org.apache.logging.log4j - - - javax.servlet - org.eclipse.jetty.orbit - - - servlet-api-2.5 - org.mortbay.jetty - - - jasper-runtime - tomcat - - - slider-core - org.apache.slider - - - hbase-server - org.apache.hbase - - - jersey-client - com.sun.jersey - - - jersey-core - com.sun.jersey - - - jersey-json - com.sun.jersey - - - jersey-server - com.sun.jersey - - - jersey-guice - com.sun.jersey.contribs - - - hbase-common - org.apache.hbase - - - hbase-hadoop2-compat - org.apache.hbase - - - hbase-client - org.apache.hbase - - - hbase-hadoop-compat - org.apache.hbase - - - tephra-hbase-compat-1.0 - co.cask.tephra - - - jaxb-api - javax.xml.bind - - - hive-llap-client - org.apache.hive - - - hive-llap-common - org.apache.hive - - - hive-llap-server - org.apache.hive - - - tephra-core - co.cask.tephra - - - ant - ant - - - stringtemplate - org.antlr - - - antlr-runtime - org.antlr - - - hive-shims - org.apache.hive - - - jsp-api - javax.servlet - - - log4j-api - org.apache.logging.log4j - - - log4j-core - org.apache.logging.log4j - - - log4j-web - org.apache.logging.log4j - - - - - - - 
ch.qos.logback - logback-classic - - - ch.qos.logback - logback-core - - - com.github.oshi - oshi-core - - - - ru.yandex.clickhouse - clickhouse-jdbc - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - - - jaxb-api - javax.xml.bind - - - - - - com.microsoft.sqlserver - mssql-jdbc - - - azure-keyvault - com.microsoft.azure - - - - - - org.mortbay.jetty - jsp-2.1 - - - org.mortbay.jetty - servlet-api-2.5 - - - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - - ${java.version} - ${java.version} - ${project.build.sourceEncoding} - - - - - diff --git a/escheduler-common/src/main/java/cn/escheduler/common/Constants.java b/escheduler-common/src/main/java/cn/escheduler/common/Constants.java deleted file mode 100644 index 7eaa6b7926..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/Constants.java +++ /dev/null @@ -1,923 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common; - -import cn.escheduler.common.utils.OSUtils; - -import java.util.regex.Pattern; - -/** - * Constants - */ -public final class Constants { - - /** - * zookeeper properties path - */ - public static final String ZOOKEEPER_PROPERTIES_PATH = "zookeeper.properties"; - - /** - * worker properties path - */ - public static final String WORKER_PROPERTIES_PATH = "worker.properties"; - - /** - * master properties path - */ - public static final String MASTER_PROPERTIES_PATH = "master.properties"; - - /** - * hadoop properties path - */ - public static final String HADOOP_PROPERTIES_PATH = "/common/hadoop/hadoop.properties"; - - /** - * common properties path - */ - public static final String COMMON_PROPERTIES_PATH = "/common/common.properties"; - - /** - * dao properties path - */ - public static final String DAO_PROPERTIES_PATH = "/dao/data_source.properties"; - - /** - * fs.defaultFS - */ - public static final String FS_DEFAULTFS = "fs.defaultFS"; - - - /** - * fs s3a endpoint - */ - public static final String FS_S3A_ENDPOINT = "fs.s3a.endpoint"; - - /** - * fs s3a access key - */ - public static final String FS_S3A_ACCESS_KEY = "fs.s3a.access.key"; - - /** - * fs s3a secret key - */ - public static final String FS_S3A_SECRET_KEY = "fs.s3a.secret.key"; - - - /** - * yarn.resourcemanager.ha.rm.idsfs.defaultFS - */ - public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids"; - - /** - * yarn.application.status.address - */ - public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address"; - - /** - * hdfs configuration - * hdfs.root.user - */ - public static final String HDFS_ROOT_USER = "hdfs.root.user"; - - /** - * hdfs configuration - * data.store2hdfs.basepath - */ - public static final String DATA_STORE_2_HDFS_BASEPATH = "data.store2hdfs.basepath"; - - /** - * data.basedir.path - */ - public static final String DATA_BASEDIR_PATH = "data.basedir.path"; - - /** - * 
data.download.basedir.path - */ - public static final String DATA_DOWNLOAD_BASEDIR_PATH = "data.download.basedir.path"; - - /** - * process.exec.basepath - */ - public static final String PROCESS_EXEC_BASEPATH = "process.exec.basepath"; - - /** - * escheduler.env.path - */ - public static final String ESCHEDULER_ENV_PATH = "escheduler.env.path"; - - - /** - * python home - */ - public static final String PYTHON_HOME="PYTHON_HOME"; - - /** - * resource.view.suffixs - */ - public static final String RESOURCE_VIEW_SUFFIXS = "resource.view.suffixs"; - - /** - * development.state - */ - public static final String DEVELOPMENT_STATE = "development.state"; - - /** - * res.upload.startup.type - */ - public static final String RES_UPLOAD_STARTUP_TYPE = "res.upload.startup.type"; - - /** - * zookeeper quorum - */ - public static final String ZOOKEEPER_QUORUM = "zookeeper.quorum"; - - /** - * MasterServer directory registered in zookeeper - */ - public static final String ZOOKEEPER_ESCHEDULER_MASTERS = "zookeeper.escheduler.masters"; - - /** - * WorkerServer directory registered in zookeeper - */ - public static final String ZOOKEEPER_ESCHEDULER_WORKERS = "zookeeper.escheduler.workers"; - - /** - * all servers directory registered in zookeeper - */ - public static final String ZOOKEEPER_ESCHEDULER_DEAD_SERVERS = "zookeeper.escheduler.dead.servers"; - - /** - * MasterServer lock directory registered in zookeeper - */ - public static final String ZOOKEEPER_ESCHEDULER_LOCK_MASTERS = "zookeeper.escheduler.lock.masters"; - - /** - * WorkerServer lock directory registered in zookeeper - */ - public static final String ZOOKEEPER_ESCHEDULER_LOCK_WORKERS = "zookeeper.escheduler.lock.workers"; - - /** - * MasterServer failover directory registered in zookeeper - */ - public static final String ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_MASTERS = "zookeeper.escheduler.lock.failover.masters"; - - /** - * WorkerServer failover directory registered in zookeeper - */ - public static final String 
ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_WORKERS = "zookeeper.escheduler.lock.failover.workers"; - - /** - * MasterServer startup failover runing and fault tolerance process - */ - public static final String ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "zookeeper.escheduler.lock.failover.startup.masters"; - - /** - * need send warn times when master server or worker server failover - */ - public static final int ESCHEDULER_WARN_TIMES_FAILOVER = 3; - - /** - * comma , - */ - public static final String COMMA = ","; - - /** - * COLON : - */ - public static final String COLON = ":"; - - /** - * SINGLE_SLASH / - */ - public static final String SINGLE_SLASH = "/"; - - /** - * DOUBLE_SLASH // - */ - public static final String DOUBLE_SLASH = "//"; - - /** - * SEMICOLON ; - */ - public static final String SEMICOLON = ";"; - - /** - * EQUAL SIGN - */ - public static final String EQUAL_SIGN = "="; - - /** - * ZOOKEEPER_SESSION_TIMEOUT - */ - public static final String ZOOKEEPER_SESSION_TIMEOUT = "zookeeper.session.timeout"; - - public static final String ZOOKEEPER_CONNECTION_TIMEOUT = "zookeeper.connection.timeout"; - - public static final String ZOOKEEPER_RETRY_SLEEP = "zookeeper.retry.sleep"; - - public static final String ZOOKEEPER_RETRY_MAXTIME = "zookeeper.retry.maxtime"; - - - public static final String MASTER_HEARTBEAT_INTERVAL = "master.heartbeat.interval"; - - public static final String MASTER_EXEC_THREADS = "master.exec.threads"; - - public static final String MASTER_EXEC_TASK_THREADS = "master.exec.task.number"; - - - public static final String MASTER_COMMIT_RETRY_TIMES = "master.task.commit.retryTimes"; - - public static final String MASTER_COMMIT_RETRY_INTERVAL = "master.task.commit.interval"; - - - public static final String WORKER_EXEC_THREADS = "worker.exec.threads"; - - public static final String WORKER_HEARTBEAT_INTERVAL = "worker.heartbeat.interval"; - - public static final String WORKER_FETCH_TASK_NUM = "worker.fetch.task.num"; - - public static final 
String WORKER_MAX_CPULOAD_AVG = "worker.max.cpuload.avg"; - - public static final String WORKER_RESERVED_MEMORY = "worker.reserved.memory"; - - public static final String MASTER_MAX_CPULOAD_AVG = "master.max.cpuload.avg"; - - public static final String MASTER_RESERVED_MEMORY = "master.reserved.memory"; - - - /** - * escheduler tasks queue - */ - public static final String SCHEDULER_TASKS_QUEUE = "tasks_queue"; - - /** - * escheduler need kill tasks queue - */ - public static final String SCHEDULER_TASKS_KILL = "tasks_kill"; - - public static final String ZOOKEEPER_SCHEDULER_ROOT = "zookeeper.escheduler.root"; - - public static final String SCHEDULER_QUEUE_IMPL = "escheduler.queue.impl"; - - - /** - * date format of yyyy-MM-dd HH:mm:ss - */ - public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss"; - - - /** - * date format of yyyyMMddHHmmss - */ - public static final String YYYYMMDDHHMMSS = "yyyyMMddHHmmss"; - - /** - * http connect time out - */ - public static final int HTTP_CONNECT_TIMEOUT = 60 * 1000; - - - /** - * http connect request time out - */ - public static final int HTTP_CONNECTION_REQUEST_TIMEOUT = 60 * 1000; - - /** - * httpclient soceket time out - */ - public static final int SOCKET_TIMEOUT = 60 * 1000; - - /** - * http header - */ - public static final String HTTP_HEADER_UNKNOWN = "unKnown"; - - /** - * http X-Forwarded-For - */ - public static final String HTTP_X_FORWARDED_FOR = "X-Forwarded-For"; - - /** - * http X-Real-IP - */ - public static final String HTTP_X_REAL_IP = "X-Real-IP"; - - /** - * UTF-8 - */ - public static final String UTF_8 = "UTF-8"; - - /** - * user name regex - */ - public static final Pattern REGEX_USER_NAME = Pattern.compile("[a-zA-Z0-9]{3,20}"); - - /** - * email regex - */ - public static final Pattern REGEX_MAIL_NAME = Pattern.compile("^([a-z0-9A-Z]+[-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}$"); - - /** - * read permission - */ - public static final int READ_PERMISSION = 2 * 1; 
- - - /** - * write permission - */ - public static final int WRITE_PERMISSION = 2 * 2; - - - /** - * execute permission - */ - public static final int EXECUTE_PERMISSION = 1; - - /** - * default admin permission - */ - public static final int DEFAULT_ADMIN_PERMISSION = 7; - - - /** - * all permissions - */ - public static final int ALL_PERMISSIONS = READ_PERMISSION | WRITE_PERMISSION | EXECUTE_PERMISSION; - - /** - * max task timeout - */ - public static final int MAX_TASK_TIMEOUT = 24 * 3600; - - - /** - * heartbeat threads number - */ - public static final int defaulWorkerHeartbeatThreadNum = 5; - - /** - * heartbeat interval - */ - public static final int defaultWorkerHeartbeatInterval = 60; - - /** - * worker fetch task number - */ - public static final int defaultWorkerFetchTaskNum = 1; - - /** - * worker execute threads number - */ - public static final int defaultWorkerExecThreadNum = 10; - - /** - * master cpu load - */ - public static final int defaultMasterCpuLoad = Runtime.getRuntime().availableProcessors() * 2; - - /** - * master reserved memory - */ - public static final double defaultMasterReservedMemory = OSUtils.totalMemorySize() / 10; - - /** - * worker cpu load - */ - public static final int defaultWorkerCpuLoad = Runtime.getRuntime().availableProcessors() * 2; - - /** - * worker reserved memory - */ - public static final double defaultWorkerReservedMemory = OSUtils.totalMemorySize() / 10; - - - /** - * master execute threads number - */ - public static final int defaultMasterExecThreadNum = 100; - - - /** - * default master concurrent task execute num - */ - public static final int defaultMasterTaskExecNum = 20; - - /** - * default log cache rows num,output when reach the number - */ - public static final int defaultLogRowsNum = 4 * 16; - - /** - * log flush interval,output when reach the interval - */ - public static final int defaultLogFlushInterval = 1000; - - - /** - * default master heartbeat thread number - */ - public static final int 
defaulMasterHeartbeatThreadNum = 5; - - - /** - * default master heartbeat interval - */ - public static final int defaultMasterHeartbeatInterval = 60; - - /** - * default master commit retry times - */ - public static final int defaultMasterCommitRetryTimes = 5; - - - /** - * default master commit retry interval - */ - public static final int defaultMasterCommitRetryInterval = 100; - - /** - * time unit secong to minutes - */ - public static final int SEC_2_MINUTES_TIME_UNIT = 60; - - - /*** - * - * rpc port - */ - public static final int RPC_PORT = 50051; - - /** - * forbid running task - */ - public static final String FLOWNODE_RUN_FLAG_FORBIDDEN = "FORBIDDEN"; - - /** - * task record configuration path - */ - public static final String DATA_SOURCE_PROPERTIES = "dao/data_source.properties"; - - public static final String TASK_RECORD_URL = "task.record.datasource.url"; - - public static final String TASK_RECORD_FLAG = "task.record.flag"; - - public static final String TASK_RECORD_USER = "task.record.datasource.username"; - - public static final String TASK_RECORD_PWD = "task.record.datasource.password"; - - public static final String DEFAULT = "Default"; - public static final String USER = "user"; - public static final String PASSWORD = "password"; - public static final String XXXXXX = "******"; - - public static String TASK_RECORD_TABLE_HIVE_LOG = "eamp_hive_log_hd"; - - public static String TASK_RECORD_TABLE_HISTORY_HIVE_LOG = "eamp_hive_hist_log_hd"; - - public static final String STATUS = "status"; - - - - /** - * command parameter keys - */ - public static final String CMDPARAM_RECOVER_PROCESS_ID_STRING = "ProcessInstanceId"; - - public static final String CMDPARAM_RECOVERY_START_NODE_STRING = "StartNodeIdList"; - - public static final String CMDPARAM_RECOVERY_WAITTING_THREAD = "WaittingThreadInstanceId"; - - public static final String CMDPARAM_SUB_PROCESS = "processInstanceId"; - - public static final String CMDPARAM_EMPTY_SUB_PROCESS = "0"; - - public 
static final String CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID = "parentProcessInstanceId"; - - public static final String CMDPARAM_SUB_PROCESS_DEFINE_ID = "processDefinitionId"; - - public static final String CMDPARAM_START_NODE_NAMES = "StartNodeNameList"; - - /** - * complement data start date - */ - public static final String CMDPARAM_COMPLEMENT_DATA_START_DATE = "complementStartDate"; - - /** - * complement data end date - */ - public static final String CMDPARAM_COMPLEMENT_DATA_END_DATE = "complementEndDate"; - - /** - * hadoop configuration - */ - public static final String HADOOP_RM_STATE_ACTIVE = "ACTIVE"; - - public static final String HADOOP_RM_STATE_STANDBY = "STANDBY"; - - public static final String HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT = "resource.manager.httpaddress.port"; - - - /** - * data source config - */ - - public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name"; - - public static final String SPRING_DATASOURCE_URL = "spring.datasource.url"; - - public static final String SPRING_DATASOURCE_USERNAME = "spring.datasource.username"; - - public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password"; - - public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout"; - - public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize"; - - public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle"; - - public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive"; - - public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait"; - - public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = "spring.datasource.timeBetweenEvictionRunsMillis"; - - public static final String SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS = "spring.datasource.timeBetweenConnectErrorMillis"; - - public static final String 
SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis"; - - public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery"; - - public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle"; - - public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow"; - - public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn"; - - public static final String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements"; - - public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit"; - - public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive"; - - public static final String SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize"; - - public static final String DEVELOPMENT = "development"; - - public static final String QUARTZ_PROPERTIES_PATH = "quartz.properties"; - - /** - * sleep time - */ - public static final int SLEEP_TIME_MILLIS = 1000; - - /** - * heartbeat for zk info length - */ - public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 6; - - - /** - * hadoop params constant - */ - /** - * jar - */ - public static final String JAR = "jar"; - - /** - * hadoop - */ - public static final String HADOOP = "hadoop"; - - /** - * -D parameter - */ - public static final String D = "-D"; - - /** - * -D mapreduce.job.queuename=ququename - */ - public static final String MR_QUEUE = "mapreduce.job.queuename"; - - - /** - * jdbc class name - */ - /** - * mysql - */ - public static final String JDBC_MYSQL_CLASS_NAME = "com.mysql.jdbc.Driver"; - - /** - * postgresql - */ - public static final String JDBC_POSTGRESQL_CLASS_NAME = "org.postgresql.Driver"; - - /** - * hive - */ - public static final String 
JDBC_HIVE_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver"; - - /** - * spark - */ - public static final String JDBC_SPARK_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver"; - - /** - * ClickHouse - */ - public static final String JDBC_CLICKHOUSE_CLASS_NAME = "ru.yandex.clickhouse.ClickHouseDriver"; - - /** - * Oracle - */ - public static final String JDBC_ORACLE_CLASS_NAME = "oracle.jdbc.driver.OracleDriver"; - - /** - * Oracle - */ - public static final String JDBC_SQLSERVER_CLASS_NAME = "com.microsoft.sqlserver.jdbc.SQLServerDriver"; - - /** - * spark params constant - */ - public static final String MASTER = "--master"; - - public static final String DEPLOY_MODE = "--deploy-mode"; - - /** - * --class CLASS_NAME - */ - public static final String CLASS = "--class"; - - /** - * --driver-cores NUM - */ - public static final String DRIVER_CORES = "--driver-cores"; - - /** - * --driver-memory MEM - */ - public static final String DRIVER_MEMORY = "--driver-memory"; - - /** - * --num-executors NUM - */ - public static final String NUM_EXECUTORS = "--num-executors"; - - /** - * --executor-cores NUM - */ - public static final String EXECUTOR_CORES = "--executor-cores"; - - /** - * --executor-memory MEM - */ - public static final String EXECUTOR_MEMORY = "--executor-memory"; - - - /** - * --queue QUEUE - */ - public static final String SPARK_QUEUE = "--queue"; - - - /** - * exit code success - */ - public static final int EXIT_CODE_SUCCESS = 0; - - /** - * exit code kill - */ - public static final int EXIT_CODE_KILL = 137; - - /** - * exit code failure - */ - public static final int EXIT_CODE_FAILURE = -1; - - /** - * date format of yyyyMMdd - */ - public static final String PARAMETER_FORMAT_DATE = "yyyyMMdd"; - - /** - * date format of yyyyMMddHHmmss - */ - public static final String PARAMETER_FORMAT_TIME = "yyyyMMddHHmmss"; - - /** - * system date(yyyyMMddHHmmss) - */ - public static final String PARAMETER_DATETIME = "system.datetime"; - - /** - * system date(yyyymmdd) today 
- */ - public static final String PARAMETER_CURRENT_DATE = "system.biz.curdate"; - - /** - * system date(yyyymmdd) yesterday - */ - public static final String PARAMETER_BUSINESS_DATE = "system.biz.date"; - - /** - * ACCEPTED - */ - public static final String ACCEPTED = "ACCEPTED"; - - /** - * SUCCEEDED - */ - public static final String SUCCEEDED = "SUCCEEDED"; - /** - * NEW - */ - public static final String NEW = "NEW"; - /** - * NEW_SAVING - */ - public static final String NEW_SAVING = "NEW_SAVING"; - /** - * SUBMITTED - */ - public static final String SUBMITTED = "SUBMITTED"; - /** - * FAILED - */ - public static final String FAILED = "FAILED"; - /** - * KILLED - */ - public static final String KILLED = "KILLED"; - /** - * RUNNING - */ - public static final String RUNNING = "RUNNING"; - /** - * underline "_" - */ - public static final String UNDERLINE = "_"; - /** - * quartz job prifix - */ - public static final String QUARTZ_JOB_PRIFIX = "job"; - /** - * quartz job group prifix - */ - public static final String QUARTZ_JOB_GROUP_PRIFIX = "jobgroup"; - /** - * projectId - */ - public static final String PROJECT_ID = "projectId"; - /** - * processId - */ - public static final String SCHEDULE_ID = "scheduleId"; - /** - * schedule - */ - public static final String SCHEDULE = "schedule"; - /** - * application regex - */ - public static final String APPLICATION_REGEX = "application_\\d+_\\d+"; - public static final String PID = "pid"; - /** - * month_begin - */ - public static final String MONTH_BEGIN = "month_begin"; - /** - * add_months - */ - public static final String ADD_MONTHS = "add_months"; - /** - * month_end - */ - public static final String MONTH_END = "month_end"; - /** - * week_begin - */ - public static final String WEEK_BEGIN = "week_begin"; - /** - * week_end - */ - public static final String WEEK_END = "week_end"; - /** - * timestamp - */ - public static final String TIMESTAMP = "timestamp"; - public static final char SUBTRACT_CHAR = '-'; - public 
static final char ADD_CHAR = '+'; - public static final char MULTIPLY_CHAR = '*'; - public static final char DIVISION_CHAR = '/'; - public static final char LEFT_BRACE_CHAR = '('; - public static final char RIGHT_BRACE_CHAR = ')'; - public static final String ADD_STRING = "+"; - public static final String MULTIPLY_STRING = "*"; - public static final String DIVISION_STRING = "/"; - public static final String LEFT_BRACE_STRING = "("; - public static final char P = 'P'; - public static final char N = 'N'; - public static final String SUBTRACT_STRING = "-"; - public static final String GLOBAL_PARAMS = "globalParams"; - public static final String LOCAL_PARAMS = "localParams"; - public static final String PROCESS_INSTANCE_STATE = "processInstanceState"; - public static final String TASK_LIST = "taskList"; - public static final String RWXR_XR_X = "rwxr-xr-x"; - - /** - * master/worker server use for zk - */ - public static final String MASTER_PREFIX = "master"; - public static final String WORKER_PREFIX = "worker"; - public static final String DELETE_ZK_OP = "delete"; - public static final String ADD_ZK_OP = "add"; - public static final String ALIAS = "alias"; - public static final String CONTENT = "content"; - public static final String DEPENDENT_SPLIT = ":||"; - public static final String DEPENDENT_ALL = "ALL"; - - - /** - * preview schedule execute count - */ - public static final int PREVIEW_SCHEDULE_EXECUTE_COUNT = 5; - - /** - * kerberos - */ - public static final String KERBEROS = "kerberos"; - - /** - * java.security.krb5.conf - */ - public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; - - /** - * java.security.krb5.conf.path - */ - public static final String JAVA_SECURITY_KRB5_CONF_PATH = "java.security.krb5.conf.path"; - - /** - * hadoop.security.authentication - */ - public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication"; - - /** - * hadoop.security.authentication - */ - public static final String 
HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state"; - - - /** - * loginUserFromKeytab user - */ - public static final String LOGIN_USER_KEY_TAB_USERNAME = "login.user.keytab.username"; - - /** - * default worker group id - */ - public static final int DEFAULT_WORKER_ID = -1; - - /** - * loginUserFromKeytab path - */ - public static final String LOGIN_USER_KEY_TAB_PATH = "login.user.keytab.path"; - - - /** - * hive conf - */ - public static final String HIVE_CONF = "hiveconf:"; - - //flink 任务 - public static final String FLINK_YARN_CLUSTER = "yarn-cluster"; - public static final String FLINK_RUN_MODE = "-m"; - public static final String FLINK_YARN_SLOT = "-ys"; - public static final String FLINK_APP_NAME = "-ynm"; - public static final String FLINK_TASK_MANAGE = "-yn"; - - public static final String FLINK_JOB_MANAGE_MEM = "-yjm"; - public static final String FLINK_TASK_MANAGE_MEM = "-ytm"; - public static final String FLINK_detach = "-d"; - public static final String FLINK_MAIN_CLASS = "-c"; - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/IStoppable.java b/escheduler-common/src/main/java/cn/escheduler/common/IStoppable.java deleted file mode 100644 index 872d63c1d8..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/IStoppable.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common; - -/** - * server stop interface. - */ -public interface IStoppable { - /** - * Stop this service. - * @param cause why stopping - */ - public void stop(String cause); - -} \ No newline at end of file diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/AlertStatus.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/AlertStatus.java deleted file mode 100644 index 6da093ed9a..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/AlertStatus.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * alert status - */ -public enum AlertStatus { - /** - * 0 waiting executed; 1 execute successfully,2 execute failed - */ - WAIT_EXECUTION,EXECUTION_SUCCESS,EXECUTION_FAILURE -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/AlertType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/AlertType.java deleted file mode 100644 index 1acf2db791..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/AlertType.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * warning message notification method - */ -public enum AlertType { - /** - * 0 email; 1 SMS - */ - EMAIL,SMS -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/CommandType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/CommandType.java deleted file mode 100644 index 3f83510819..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/CommandType.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * command types - */ -public enum CommandType { - - /** - * command types - * 0 start a new process - * 1 start a new process from current nodes - * 2 recover tolerance fault work flow - * 3 start process from paused task nodes - * 4 start process from failure task nodes - * 5 complement data - * 6 start a new process from scheduler - * 7 repeat running a work flow - * 8 pause a process - * 9 stop a process - * 10 recover waiting thread - */ - START_PROCESS, START_CURRENT_TASK_PROCESS, RECOVER_TOLERANCE_FAULT_PROCESS, RECOVER_SUSPENDED_PROCESS, - START_FAILURE_TASK_PROCESS,COMPLEMENT_DATA,SCHEDULER, REPEAT_RUNNING,PAUSE,STOP,RECOVER_WAITTING_THREAD; -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/CycleEnum.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/CycleEnum.java deleted file mode 100644 index 48a9a5c163..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/CycleEnum.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * cycle enums - */ -public enum CycleEnum { - /** - * 0 minute; 1 hour; 2 day; 3 week; 4 month; 5 year; - */ - MINUTE, HOUR, DAY, WEEK, MONTH, YEAR - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/DataType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/DataType.java deleted file mode 100644 index 0c0e3f323e..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/DataType.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * data types in user define parameter - */ -public enum DataType { - /** - * 0 string - * 1 integer - * 2 long - * 3 float - * 4 double - * 5 date, "YYYY-MM-DD" - * 6 time, "HH:MM:SS" - * 7 time stamp - * 8 Boolean - */ - VARCHAR,INTEGER,LONG,FLOAT,DOUBLE,DATE,TIME,TIMESTAMP,BOOLEAN -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/DbType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/DbType.java deleted file mode 100644 index bf0ebba60c..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/DbType.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * data base types - */ -public enum DbType { - /** - * 0 mysql - * 1 postgresql - * 2 hive - * 3 spark - * 4 clickhouse - * 5 oracle - * 6 sqlserver - */ - MYSQL, POSTGRESQL, HIVE, SPARK, CLICKHOUSE, ORACLE, SQLSERVER -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/DependResult.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/DependResult.java deleted file mode 100644 index 7a9a9ac7c3..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/DependResult.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * depend result - */ -public enum DependResult { - - - /** - * 0 success - * 1 waiting - * 2 failed - */ - SUCCESS, WAITING, FAILED -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/DependStrategy.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/DependStrategy.java deleted file mode 100644 index c485351462..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/DependStrategy.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * depend strategy - */ -public enum DependStrategy { - - /** - * 0 none,1 all success 2 all failed 3 one success 4 one failed - */ - NONE, ALL_SUCCESS, ALL_FAILED, ONE_SUCCESS, ONE_FAILED - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/DependentRelation.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/DependentRelation.java deleted file mode 100644 index f95059f201..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/DependentRelation.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * dependent relation: and or - */ -public enum DependentRelation { - - AND,OR; -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/Direct.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/Direct.java deleted file mode 100644 index c7d8afc006..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/Direct.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * parameter of stored procedure - */ -public enum Direct { - /** - * 0 in; 1 out; - */ - IN,OUT -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/ExecutionStatus.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/ExecutionStatus.java deleted file mode 100644 index 4efcc09d6c..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/ExecutionStatus.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - - -/** - * runing status for workflow and task nodes - * - */ -public enum ExecutionStatus { - - /** - * status: - * 0 submit success - * 1 running - * 2 ready pause - * 3 pause - * 4 ready stop - * 5 stop - * 6 failure - * 7 success - * 8 need fault tolerance - * 9 kill - * 10 waiting thread - * 11 waiting depend node complete - */ - SUBMITTED_SUCCESS,RUNNING_EXEUTION,READY_PAUSE,PAUSE,READY_STOP,STOP,FAILURE,SUCCESS, - NEED_FAULT_TOLERANCE,KILL,WAITTING_THREAD,WAITTING_DEPEND; - - - /** - * status is success - * @return - */ - public boolean typeIsSuccess(){ - return this == SUCCESS; - } - - /** - * status is failure - * @return - */ - public boolean typeIsFailure(){ - return this == FAILURE || this == NEED_FAULT_TOLERANCE; - } - - /** - * status is finished - * @return - */ - public boolean typeIsFinished(){ - - return typeIsSuccess() || typeIsFailure() || typeIsCancel() || typeIsPause() - || typeIsWaittingThread(); - } - - /** - * status is waiting thread - * @return - */ - public boolean typeIsWaittingThread(){ - return this == WAITTING_THREAD; - } - - /** - * status is pause - * @return - */ - public boolean typeIsPause(){ - return this == PAUSE; - } - - /** - * status is running - * @return - */ - public boolean typeIsRunning(){ - return this == RUNNING_EXEUTION || this == WAITTING_DEPEND; - } - - /** - * status is cancel - */ - public boolean typeIsCancel(){ return this == KILL || this == STOP ;} - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/FailureStrategy.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/FailureStrategy.java deleted file mode 100644 index 3862d41537..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/FailureStrategy.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * failure policy when some task node failed. - */ -public enum FailureStrategy { - - /** - * 0 ending process when some tasks failed. - * 1 continue running when some tasks failed. - **/ - END, CONTINUE; - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/Flag.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/Flag.java deleted file mode 100644 index 25af24ff68..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/Flag.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * have_script - * have_file - * can_retry - * have_arr_variables - * have_map_variables - * have_alert - */ -public enum Flag { - /** - * 0 no - * 1 yes - */ - NO,YES -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/HttpCheckCondition.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/HttpCheckCondition.java deleted file mode 100644 index b88ec01980..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/HttpCheckCondition.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * http check condition - */ -public enum HttpCheckCondition { - /** - * 0 status_code_default:200 - * 1 status_code_custom - * 2 body_contains - * 3 body_not_contains - */ - STATUS_CODE_DEFAULT,STATUS_CODE_CUSTOM, BODY_CONTAINS, BODY_NOT_CONTAINS -} \ No newline at end of file diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/HttpMethod.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/HttpMethod.java deleted file mode 100644 index 32cbf1c42f..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/HttpMethod.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * http method - */ -public enum HttpMethod { - /** - * 0 get - * 1 post - * 2 head - * 3 put - * 4 delete - */ - GET, POST, HEAD, PUT, DELETE -} \ No newline at end of file diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/HttpParametersType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/HttpParametersType.java deleted file mode 100644 index 21f4d8b3cc..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/HttpParametersType.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * http parameters type - */ -public enum HttpParametersType { - /** - * 0 parameter; - * 1 body; - * 2 headers; - */ - PARAMETER,BODY,HEADERS -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/Priority.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/Priority.java deleted file mode 100644 index 3aa1f22539..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/Priority.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * define process and task priority - */ -public enum Priority { - /** - * 0 highest priority - * 1 higher priority - * 2 medium priority - * 3 lower priority - * 4 lowest priority - */ - HIGHEST,HIGH,MEDIUM,LOW,LOWEST -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/ProgramType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/ProgramType.java deleted file mode 100644 index 1754f652f8..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/ProgramType.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - - -/** - * support program types - */ -public enum ProgramType { - /** - * 0 JAVA,1 SCALA,2 PYTHON - */ - JAVA, - SCALA, - PYTHON -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/ReleaseState.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/ReleaseState.java deleted file mode 100644 index 47a5680de6..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/ReleaseState.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * process define release state - */ -public enum ReleaseState { - - /** - * 0 offline - * 1 on line - */ - OFFLINE,ONLINE; - - - public static ReleaseState getEnum(int value){ - for (ReleaseState e:ReleaseState.values()) { - if(e.ordinal() == value) { - return e; - } - } - //For values out of enum scope - return null; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/ResUploadType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/ResUploadType.java deleted file mode 100644 index 65d8be8f92..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/ResUploadType.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * data base types - */ -public enum ResUploadType { - /** - * 0 hdfs - * 1 s3 - * 2 none - */ - HDFS,S3,NONE -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/ResourceType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/ResourceType.java deleted file mode 100644 index 2f1dcdde65..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/ResourceType.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * resource type - */ -public enum ResourceType { - /** - * 0 file, 1 udf - */ - FILE,UDF -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/RunMode.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/RunMode.java deleted file mode 100644 index 09a13ef8d3..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/RunMode.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * complement data run mode - */ -public enum RunMode { - /** - * 0 serial run - * 1 parallel run - * */ - RUN_MODE_SERIAL, RUN_MODE_PARALLEL -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/SelfDependStrategy.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/SelfDependStrategy.java deleted file mode 100644 index c5935bdc6e..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/SelfDependStrategy.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * self depency strategy - */ -public enum SelfDependStrategy { - - /** - * 0 donot depend the last cycle; - * 1 depend the last cycle - **/ - NO_DEP_PRE, DEP_PRE -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/ServerEnum.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/ServerEnum.java deleted file mode 100644 index 5c27f6567b..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/ServerEnum.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * cycle enums - */ -public enum ServerEnum { - - /** - * master server , worker server - */ - MASTER_SERVER,WORKER_SERVER - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/ShowType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/ShowType.java deleted file mode 100644 index d7ee0e51af..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/ShowType.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * show type for email - */ -public enum ShowType { - /** - * 0 TABLE; - * 1 TEXT; - * 2 attachment; - * 3 TABLE+attachment; - */ - TABLE, - TEXT, - ATTACHMENT, - TABLEATTACHMENT - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskDependType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskDependType.java deleted file mode 100644 index 9d6a069592..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskDependType.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * task node depend type - */ -public enum TaskDependType { - /** - * 0 run current tasks only - * 1 run current tasks and previous tasks - * 2 run current tasks and the other tasks that depend on current tasks; - */ - TASK_ONLY, TASK_PRE, TASK_POST; - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskRecordStatus.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskRecordStatus.java deleted file mode 100644 index a0fb8f1dfc..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskRecordStatus.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - - -/** - * task record status - * - */ -public enum TaskRecordStatus { - - /** - * status: - * 0 sucess - * 1 failure - * 2 exception - */ - SUCCESS,FAILURE,EXCEPTION - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskStateType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskStateType.java deleted file mode 100644 index 678e8e5be8..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskStateType.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * type of task state - */ -public enum TaskStateType { - /** - * 0 waiting running - * 1 running - * 2 finish - * 3 failed - * 4 success - */ - WAITTING, RUNNING, FINISH, FAILED, SUCCESS; - - - /** - * convert task state to execute status integer array ; - * @param taskStateType - * @return - */ - public static int[] convert2ExecutStatusIntArray(TaskStateType taskStateType){ - - switch (taskStateType){ - case SUCCESS: - return new int[]{ExecutionStatus.SUCCESS.ordinal()}; - case FAILED: - return new int[]{ - ExecutionStatus.FAILURE.ordinal(), - ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal()}; - case FINISH: - return new int[]{ - ExecutionStatus.PAUSE.ordinal(), - ExecutionStatus.STOP.ordinal() - }; - case RUNNING: - return new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), - ExecutionStatus.RUNNING_EXEUTION.ordinal(), - ExecutionStatus.READY_PAUSE.ordinal(), - ExecutionStatus.READY_STOP.ordinal()}; - case WAITTING: - return new int[]{ - ExecutionStatus.SUBMITTED_SUCCESS.ordinal() - }; - default: - break; - } - return null; - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskTimeoutStrategy.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskTimeoutStrategy.java deleted file mode 100644 index c454c98b10..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskTimeoutStrategy.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * task timeout strategy - */ -public enum TaskTimeoutStrategy { - /** - * 0 warn - * 1 failed - * 2 warn+failed - */ - WARN, FAILED, WARNFAILED -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskType.java deleted file mode 100644 index a212261db6..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/TaskType.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * task node type - */ -public enum TaskType { - /** - * 0 SHELL - * 1 SQL - * 2 SUB_PROCESS - * 3 PROCEDURE - * 4 MR - * 5 SPARK - * 6 PYTHON - * 7 DEPENDENT - * 8 FLINK - * 9 HTTP - */ - SHELL,SQL, SUB_PROCESS,PROCEDURE,MR,SPARK,PYTHON,DEPENDENT,FLINK,HTTP; - - public static boolean typeIsNormalTask(String typeName) { - TaskType taskType = TaskType.valueOf(typeName); - return !(taskType == TaskType.SUB_PROCESS || taskType == TaskType.DEPENDENT); - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/UdfType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/UdfType.java deleted file mode 100644 index 7114b52f50..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/UdfType.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.enums; - -/** - * UDF type - */ -public enum UdfType { - /** - * 0 hive; 1 spark - */ - HIVE, SPARK -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/UserType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/UserType.java deleted file mode 100644 index 7f36694295..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/UserType.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * user type - */ -public enum UserType { - /** - * 0 admin user; 1 general user - */ - ADMIN_USER, - GENERAL_USER -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/WarningType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/WarningType.java deleted file mode 100644 index 9c8e5b3c12..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/WarningType.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.enums; - -/** - * types for whether to send warning when process ending; - */ -public enum WarningType { - /** - * 0 do not send warning; - * 1 send if process success; - * 2 send if process failed; - * 3 send if process ending; - */ - NONE, SUCCESS, FAILURE, ALL; - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/enums/ZKNodeType.java b/escheduler-common/src/main/java/cn/escheduler/common/enums/ZKNodeType.java deleted file mode 100644 index 371231f727..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/enums/ZKNodeType.java +++ /dev/null @@ -1,15 +0,0 @@ -package cn.escheduler.common.enums; - -/** - * zk node type - */ -public enum ZKNodeType { - - /** - * 0 do not send warning; - * 1 send if process success; - * 2 send if process failed; - * 3 send if process ending; - */ - MASTER, WORKER, DEAD_SERVER, TASK_QUEUE; -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/graph/DAG.java b/escheduler-common/src/main/java/cn/escheduler/common/graph/DAG.java deleted file mode 100644 index 43fbb26e5a..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/graph/DAG.java +++ /dev/null @@ -1,519 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.graph; - -import cn.escheduler.common.utils.CollectionUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; - -/** - * analysis of DAG - * Node: node - * NodeInfo:node description information - * EdgeInfo: edge description information - */ -public class DAG { - - - private static final Logger logger = LoggerFactory.getLogger(DAG.class); - - private final ReadWriteLock lock = new ReentrantReadWriteLock(); - - /** - * node map, key is node, value is node information - */ - private volatile Map nodesMap; - - /** - * edge map. 
key is node of origin;value is Map with key for destination node and value for edge - */ - private volatile Map> edgesMap; - - /** - * reversed edge set,key is node of destination, value is Map with key for origin node and value for edge - */ - private volatile Map> reverseEdgesMap; - - - public DAG() { - nodesMap = new HashMap<>(); - edgesMap = new HashMap<>(); - reverseEdgesMap = new HashMap<>(); - } - - - /** - * add node information - * - * @param node node - * @param nodeInfo node information - */ - public void addNode(Node node, NodeInfo nodeInfo) { - lock.writeLock().lock(); - - try{ - nodesMap.put(node, nodeInfo); - }finally { - lock.writeLock().unlock(); - } - - } - - - /** - * add edge - * @param fromNode node of origin - * @param toNode node of destination - * @return The result of adding an edge. returns false if the DAG result is a ring result - */ - public boolean addEdge(Node fromNode, Node toNode) { - return addEdge(fromNode, toNode, false); - } - - - /** - * add edge - * @param fromNode node of origin - * @param toNode node of destination - * @param createNode whether the node needs to be created if it does not exist - * @return The result of adding an edge. returns false if the DAG result is a ring result - */ - private boolean addEdge(Node fromNode, Node toNode, boolean createNode) { - return addEdge(fromNode, toNode, null, createNode); - } - - - /** - * add edge - * - * @param fromNode node of origin - * @param toNode node of destination - * @param edge edge description - * @param createNode whether the node needs to be created if it does not exist - * @return The result of adding an edge. 
returns false if the DAG result is a ring result - */ - public boolean addEdge(Node fromNode, Node toNode, EdgeInfo edge, boolean createNode) { - lock.writeLock().lock(); - - try{ - - // Whether an edge can be successfully added(fromNode -> toNode) - if (!isLegalAddEdge(fromNode, toNode, createNode)) { - logger.error("serious error: add edge({} -> {}) is invalid, cause cycle!", fromNode, toNode); - return false; - } - - addNodeIfAbsent(fromNode, null); - addNodeIfAbsent(toNode, null); - - addEdge(fromNode, toNode, edge, edgesMap); - addEdge(toNode, fromNode, edge, reverseEdgesMap); - - return true; - }finally { - lock.writeLock().unlock(); - } - - } - - - /** - * whether this node is contained - * - * @param node node - * @return - */ - public boolean containsNode(Node node) { - lock.readLock().lock(); - - try{ - return nodesMap.containsKey(node); - }finally { - lock.readLock().unlock(); - } - } - - - /** - * whether this edge is contained - * - * @param fromNode node of origin - * @param toNode node of destination - * @return - */ - public boolean containsEdge(Node fromNode, Node toNode) { - lock.readLock().lock(); - try{ - Map endEdges = edgesMap.get(fromNode); - if (endEdges == null) { - return false; - } - - return endEdges.containsKey(toNode); - }finally { - lock.readLock().unlock(); - } - } - - - /** - * get node description - * - * @param node node - * @return - */ - public NodeInfo getNode(Node node) { - lock.readLock().lock(); - - try{ - return nodesMap.get(node); - }finally { - lock.readLock().unlock(); - } - } - - - /** - * Get the number of nodes - * - * @return - */ - public int getNodesCount() { - lock.readLock().lock(); - - try{ - return nodesMap.size(); - }finally { - lock.readLock().unlock(); - } - } - - /** - * Get the number of edges - * - * @return - */ - public int getEdgesCount() { - lock.readLock().lock(); - try{ - int count = 0; - - for (Map.Entry> entry : edgesMap.entrySet()) { - count += entry.getValue().size(); - } - - return count; - 
}finally { - lock.readLock().unlock(); - } - } - - - /** - * get the start node of DAG - * - * @return - */ - public Collection getBeginNode() { - lock.readLock().lock(); - - try{ - return CollectionUtils.subtract(nodesMap.keySet(), reverseEdgesMap.keySet()); - }finally { - lock.readLock().unlock(); - } - - } - - - /** - * get the end node of DAG - * - * @return - */ - public Collection getEndNode() { - - lock.readLock().lock(); - - try{ - return CollectionUtils.subtract(nodesMap.keySet(), edgesMap.keySet()); - }finally { - lock.readLock().unlock(); - } - - } - - - /** - * Gets all previous nodes of the node - * - * @param node node id to be calculated - * @return - */ - public Set getPreviousNodes(Node node) { - lock.readLock().lock(); - - try{ - return getNeighborNodes(node, reverseEdgesMap); - }finally { - lock.readLock().unlock(); - } - } - - - /** - * Get all subsequent nodes of the node - * - * @param node node id to be calculated - * @return - */ - public Set getSubsequentNodes(Node node) { - lock.readLock().lock(); - - try{ - return getNeighborNodes(node, edgesMap); - }finally { - lock.readLock().unlock(); - } - } - - - /** - * Gets the degree of entry of the node - * - * @param node node id - * @return - */ - public int getIndegree(Node node) { - lock.readLock().lock(); - - try{ - return getPreviousNodes(node).size(); - }finally { - lock.readLock().unlock(); - } - } - - - /** - * whether the graph has a ring - * - * @return true if has cycle, else return false. 
- */ - public boolean hasCycle() { - lock.readLock().lock(); - try{ - return !topologicalSortImpl().getKey(); - }finally { - lock.readLock().unlock(); - } - } - - - /** - * Only DAG has a topological sort - * @return topologically sorted results, returns false if the DAG result is a ring result - * @throws Exception - */ - public List topologicalSort() throws Exception { - lock.readLock().lock(); - - try{ - Map.Entry> entry = topologicalSortImpl(); - - if (entry.getKey()) { - return entry.getValue(); - } - - throw new Exception("serious error: graph has cycle ! "); - }finally { - lock.readLock().unlock(); - } - } - - - /** - * if tho node does not exist,add this node - * - * @param node node - * @param nodeInfo node information - */ - private void addNodeIfAbsent(Node node, NodeInfo nodeInfo) { - if (!containsNode(node)) { - addNode(node, nodeInfo); - } - } - - - /** - * add edge - * - * @param fromNode node of origin - * @param toNode node of destination - * @param edge edge description - * @param edges edge set - */ - private void addEdge(Node fromNode, Node toNode, EdgeInfo edge, Map> edges) { - edges.putIfAbsent(fromNode, new HashMap<>()); - Map toNodeEdges = edges.get(fromNode); - toNodeEdges.put(toNode, edge); - } - - - /** - * Whether an edge can be successfully added(fromNode -> toNode) - * need to determine whether the DAG has cycle - * - * @param fromNode node of origin - * @param toNode node of destination - * @param createNode whether to create a node - * @return - */ - private boolean isLegalAddEdge(Node fromNode, Node toNode, boolean createNode) { - if (fromNode.equals(toNode)) { - logger.error("edge fromNode({}) can't equals toNode({})", fromNode, toNode); - return false; - } - - if (!createNode) { - if (!containsNode(fromNode) || !containsNode(toNode)){ - logger.error("edge fromNode({}) or toNode({}) is not in vertices map", fromNode, toNode); - return false; - } - } - - // Whether an edge can be successfully added(fromNode -> toNode),need to 
determine whether the DAG has cycle! - int verticesCount = getNodesCount(); - - Queue queue = new LinkedList<>(); - - queue.add(toNode); - - // if DAG doesn't find fromNode, it's not has cycle! - while (!queue.isEmpty() && (--verticesCount > 0)) { - Node key = queue.poll(); - - for (Node subsequentNode : getSubsequentNodes(key)) { - if (subsequentNode.equals(fromNode)) { - return false; - } - - queue.add(subsequentNode); - } - } - - return true; - } - - - /** - * Get all neighbor nodes of the node - * - * @param node Node id to be calculated - * @param edges neighbor edge information - * @return - */ - private Set getNeighborNodes(Node node, final Map> edges) { - final Map neighborEdges = edges.get(node); - - if (neighborEdges == null) { - return Collections.EMPTY_MAP.keySet(); - } - - return neighborEdges.keySet(); - } - - - - /** - * Determine whether there are ring and topological sorting results - * - * Directed acyclic graph (DAG) has topological ordering - * Breadth First Search: - * 1、Traversal of all the vertices in the graph, the degree of entry is 0 vertex into the queue - * 2、Poll a vertex in the queue to update its adjacency (minus 1) and queue the adjacency if it is 0 after minus 1 - * 3、Do step 2 until the queue is empty - * If you cannot traverse all the nodes, it means that the current graph is not a directed acyclic graph. - * There is no topological sort. 
- * - * - * @return key Returns the state - * if success (acyclic) is true, failure (acyclic) is looped, - * and value (possibly one of the topological sort results) - */ - private Map.Entry> topologicalSortImpl() { - // node queue with degree of entry 0 - Queue zeroIndegreeNodeQueue = new LinkedList<>(); - // save result - List topoResultList = new ArrayList<>(); - // save the node whose degree is not 0 - Map notZeroIndegreeNodeMap = new HashMap<>(); - - // Scan all the vertices and push vertexs with an entry degree of 0 to queue - for (Map.Entry vertices : nodesMap.entrySet()) { - Node node = vertices.getKey(); - int inDegree = getIndegree(node); - - if (inDegree == 0) { - zeroIndegreeNodeQueue.add(node); - topoResultList.add(node); - } else { - notZeroIndegreeNodeMap.put(node, inDegree); - } - } - - /** - * After scanning, there is no node with 0 degree of entry, - * indicating that there is a ring, and return directly - */ - if(zeroIndegreeNodeQueue.isEmpty()){ - return new AbstractMap.SimpleEntry(false, topoResultList); - } - - // The topology algorithm is used to delete nodes with 0 degree of entry and its associated edges - while (!zeroIndegreeNodeQueue.isEmpty()) { - Node v = zeroIndegreeNodeQueue.poll(); - // Get the neighbor node - Set subsequentNodes = getSubsequentNodes(v); - - for (Node subsequentNode : subsequentNodes) { - - Integer degree = notZeroIndegreeNodeMap.get(subsequentNode); - - if(--degree == 0){ - topoResultList.add(subsequentNode); - zeroIndegreeNodeQueue.add(subsequentNode); - notZeroIndegreeNodeMap.remove(subsequentNode); - }else{ - notZeroIndegreeNodeMap.put(subsequentNode, degree); - } - - } - } - - // if notZeroIndegreeNodeMap is empty,there is no ring! 
- AbstractMap.SimpleEntry resultMap = new AbstractMap.SimpleEntry(notZeroIndegreeNodeMap.size() == 0 , topoResultList); - return resultMap; - - } - -} - diff --git a/escheduler-common/src/main/java/cn/escheduler/common/job/db/BaseDataSource.java b/escheduler-common/src/main/java/cn/escheduler/common/job/db/BaseDataSource.java deleted file mode 100644 index f215d3e8c9..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/job/db/BaseDataSource.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.job.db; - -/** - * data source base class - */ -public abstract class BaseDataSource { - /** - * user name - */ - private String user; - - /** - * user password - */ - private String password; - - /** - * data source address - */ - private String address; - - /** - * database name - */ - private String database; - - /** - * other connection parameters for the data source - */ - private String other; - - /** - * principal - */ - private String principal; - - public String getPrincipal() { - return principal; - } - - public void setPrincipal(String principal) { - this.principal = principal; - } - /** - * test whether the data source can be connected successfully - * @throws Exception - */ - public abstract void isConnectable() throws Exception; - - /** - * gets the JDBC url for the data source connection - * @return - */ - public abstract String getJdbcUrl(); - - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - public String getPassword() { - return password; - } - - public void setPassword(String password) { - this.password = password; - } - - public void setAddress(String address) { - this.address = address; - } - - public String getAddress() { - return address; - } - - public String getDatabase() { - return database; - } - - public void setDatabase(String database) { - this.database = database; - } - - public String getOther() { - return other; - } - - public void setOther(String other) { - this.other = other; - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/job/db/ClickHouseDataSource.java b/escheduler-common/src/main/java/cn/escheduler/common/job/db/ClickHouseDataSource.java deleted file mode 100644 index b4df4d8f5a..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/job/db/ClickHouseDataSource.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor 
license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.job.db; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -/** - * data source of ClickHouse - */ -public class ClickHouseDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(ClickHouseDataSource.class); - - /** - * gets the JDBC url for the data source connection - * @return - */ - @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { - jdbcUrl += "/"; - } - - jdbcUrl += getDatabase(); - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += "?" 
+ getOther(); - } - - return jdbcUrl; - } - - /** - * test whether the data source can be connected successfully - * @throws Exception - */ - @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName("ru.yandex.clickhouse.ClickHouseDriver"); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("ClickHouse datasource try conn close conn error", e); - throw e; - } - } - } - - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/job/db/DataSourceFactory.java b/escheduler-common/src/main/java/cn/escheduler/common/job/db/DataSourceFactory.java deleted file mode 100644 index c710944bd2..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/job/db/DataSourceFactory.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.job.db; - -import cn.escheduler.common.enums.DbType; -import cn.escheduler.common.utils.JSONUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static cn.escheduler.common.Constants.*; - -/** - * produce datasource in this custom defined datasource factory. - */ -public class DataSourceFactory { - - private static final Logger logger = LoggerFactory.getLogger(DataSourceFactory.class); - - public static BaseDataSource getDatasource(DbType dbType, String parameter) { - try { - switch (dbType) { - case MYSQL: - return JSONUtils.parseObject(parameter, MySQLDataSource.class); - case POSTGRESQL: - return JSONUtils.parseObject(parameter, PostgreDataSource.class); - case HIVE: - return JSONUtils.parseObject(parameter, HiveDataSource.class); - case SPARK: - return JSONUtils.parseObject(parameter, SparkDataSource.class); - case CLICKHOUSE: - return JSONUtils.parseObject(parameter, ClickHouseDataSource.class); - case ORACLE: - return JSONUtils.parseObject(parameter, OracleDataSource.class); - case SQLSERVER: - return JSONUtils.parseObject(parameter, SQLServerDataSource.class); - default: - return null; - } - } catch (Exception e) { - logger.error("get datasource object error", e); - return null; - } - } - - /** - * load class - * @param dbType - * @throws Exception - */ - public static void loadClass(DbType dbType) throws Exception{ - switch (dbType){ - case MYSQL : - Class.forName(JDBC_MYSQL_CLASS_NAME); - break; - case POSTGRESQL : - Class.forName(JDBC_POSTGRESQL_CLASS_NAME); - break; - case HIVE : - Class.forName(JDBC_HIVE_CLASS_NAME); - break; - case SPARK : - Class.forName(JDBC_SPARK_CLASS_NAME); - break; - case CLICKHOUSE : - Class.forName(JDBC_CLICKHOUSE_CLASS_NAME); - break; - case ORACLE : - Class.forName(JDBC_ORACLE_CLASS_NAME); - break; - case SQLSERVER: - Class.forName(JDBC_SQLSERVER_CLASS_NAME); - break; - default: - logger.error("not support sql type: {},can't load class", dbType); - throw new 
IllegalArgumentException("not support sql type,can't load class"); - - } - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/job/db/HiveDataSource.java b/escheduler-common/src/main/java/cn/escheduler/common/job/db/HiveDataSource.java deleted file mode 100644 index 0d45924e8e..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/job/db/HiveDataSource.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.job.db; - -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.*; - -/** - * data source of hive - */ -public class HiveDataSource extends BaseDataSource { - - private static final Logger logger = LoggerFactory.getLogger(HiveDataSource.class); - - - - - /** - * gets the JDBC url for the data source connection - * @return - */ - @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { - jdbcUrl += "/"; - } - - jdbcUrl += getDatabase(); - - if (StringUtils.isNotEmpty(getPrincipal())){ - jdbcUrl += ";principal=" + getPrincipal(); - } - - - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += ";" + getOther(); - } - - return jdbcUrl; - } - - /** - * test whether the data source can be connected successfully - * @throws Exception - */ - @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName("org.apache.hive.jdbc.HiveDriver"); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), ""); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("hive datasource try conn close conn error", e); - throw e; - } - } - } - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/job/db/MySQLDataSource.java b/escheduler-common/src/main/java/cn/escheduler/common/job/db/MySQLDataSource.java deleted file mode 100644 index 62d419e04c..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/job/db/MySQLDataSource.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.job.db; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -/** - * data source of mySQL - */ -public class MySQLDataSource extends BaseDataSource { - - private static final Logger logger = LoggerFactory.getLogger(MySQLDataSource.class); - - /** - * gets the JDBC url for the data source connection - * @return - */ - @Override - public String getJdbcUrl() { - String address = getAddress(); - if (address.lastIndexOf("/") != (address.length() - 1)) { - address += "/"; - } - String jdbcUrl = address + getDatabase(); - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += "?" 
+ getOther(); - } - return jdbcUrl; - } - - /** - * test whether the data source can be connected successfully - * @throws Exception - */ - @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName("com.mysql.jdbc.Driver"); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("Mysql datasource try conn close conn error", e); - throw e; - } - } - } - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/job/db/OracleDataSource.java b/escheduler-common/src/main/java/cn/escheduler/common/job/db/OracleDataSource.java deleted file mode 100644 index 5e245a590c..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/job/db/OracleDataSource.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.job.db; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -/** - * data source of Oracle - */ -public class OracleDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(OracleDataSource.class); - - /** - * gets the JDBC url for the data source connection - * @return - */ - @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { - jdbcUrl += "/"; - } - - jdbcUrl += getDatabase(); - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += "?" + getOther(); - } - - return jdbcUrl; - } - - /** - * test whether the data source can be connected successfully - * @throws Exception - */ - @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName("oracle.jdbc.driver.OracleDriver"); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("Oracle datasource try conn close conn error", e); - throw e; - } - } - } - - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/job/db/PostgreDataSource.java b/escheduler-common/src/main/java/cn/escheduler/common/job/db/PostgreDataSource.java deleted file mode 100644 index a0c893d6d6..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/job/db/PostgreDataSource.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.job.db; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -/** - * data source of postgreSQL - */ -public class PostgreDataSource extends BaseDataSource { - - private static final Logger logger = LoggerFactory.getLogger(PostgreDataSource.class); - - - /** - * gets the JDBC url for the data source connection - * @return - */ - @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { - jdbcUrl += "/"; - } - - jdbcUrl += getDatabase(); - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += "?" 
+ getOther(); - } - - return jdbcUrl; - } - - /** - * test whether the data source can be connected successfully - * @throws Exception - */ - @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName("org.postgresql.Driver"); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("Postgre datasource try conn close conn error", e); - throw e; - } - } - } - - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/job/db/SQLServerDataSource.java b/escheduler-common/src/main/java/cn/escheduler/common/job/db/SQLServerDataSource.java deleted file mode 100644 index f4d202a76e..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/job/db/SQLServerDataSource.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.job.db; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -/** - * data source of SQL Server - */ -public class SQLServerDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(SQLServerDataSource.class); - - /** - * gets the JDBC url for the data source connection - * @return - */ - @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - jdbcUrl += ";databaseName=" + getDatabase(); - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += ";" + getOther(); - } - - return jdbcUrl; - } - - /** - * test whether the data source can be connected successfully - * @throws Exception - */ - @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver"); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("SQL Server datasource try conn close conn error", e); - throw e; - } - } - } - - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/job/db/SparkDataSource.java b/escheduler-common/src/main/java/cn/escheduler/common/job/db/SparkDataSource.java deleted file mode 100644 index 13aa06eaae..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/job/db/SparkDataSource.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.job.db; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -/** - * data source of spark - */ -public class SparkDataSource extends BaseDataSource { - - private static final Logger logger = LoggerFactory.getLogger(SparkDataSource.class); - - /** - * gets the JDBC url for the data source connection - * @return - */ - @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { - jdbcUrl += "/"; - } - - jdbcUrl += getDatabase() + ";principal=" + getPrincipal(); - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += ";" + getOther(); - } - - return jdbcUrl; - } - - /** - * test whether the data source can be connected successfully - * @throws Exception - */ - @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName("org.apache.hive.jdbc.HiveDriver"); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), ""); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("Spark datasource try conn close conn error", e); - throw e; - } - } - } - - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/model/DateInterval.java b/escheduler-common/src/main/java/cn/escheduler/common/model/DateInterval.java deleted file mode 100644 index aaf64325c5..0000000000 --- 
a/escheduler-common/src/main/java/cn/escheduler/common/model/DateInterval.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.model; - -import java.util.Date; - -/** - * date interval class - */ -public class DateInterval { - - private Date startTime; - - private Date endTime; - - public DateInterval(Date beginTime, Date endTime){ - this.startTime = beginTime; - this.endTime = endTime; - - } - - @Override - public boolean equals(Object obj) { - try{ - DateInterval dateInterval = (DateInterval) obj; - return startTime.equals(dateInterval.getStartTime()) && - endTime.equals(dateInterval.getEndTime()); - }catch (Exception e){ - return false; - } - } - - public Date getStartTime() { - return startTime; - } - - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - public Date getEndTime() { - return endTime; - } - - public void setEndTime(Date endTime) { - this.endTime = endTime; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/model/DependentItem.java b/escheduler-common/src/main/java/cn/escheduler/common/model/DependentItem.java deleted file mode 100644 index 117a54db56..0000000000 --- 
a/escheduler-common/src/main/java/cn/escheduler/common/model/DependentItem.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.model; - -import cn.escheduler.common.enums.DependResult; - -/** - * dependent item - */ -public class DependentItem { - - private int definitionId; - private String depTasks; - private String cycle; - private String dateValue; - private DependResult dependResult; - - - public String getKey(){ - return String.format("%d-%s-%s-%s", - getDefinitionId(), - getDepTasks(), - getCycle(), - getDateValue()); - } - - public int getDefinitionId() { - return definitionId; - } - - public void setDefinitionId(int definitionId) { - this.definitionId = definitionId; - } - - public String getDepTasks() { - return depTasks; - } - - public void setDepTasks(String depTasks) { - this.depTasks = depTasks; - } - - public String getCycle() { - return cycle; - } - - public void setCycle(String cycle) { - this.cycle = cycle; - } - - public String getDateValue() { - return dateValue; - } - - public void setDateValue(String dateValue) { - this.dateValue = dateValue; - } - - public DependResult getDependResult() { - return dependResult; - } - - public void 
setDependResult(DependResult dependResult) { - this.dependResult = dependResult; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/model/DependentTaskModel.java b/escheduler-common/src/main/java/cn/escheduler/common/model/DependentTaskModel.java deleted file mode 100644 index b642b2e5b5..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/model/DependentTaskModel.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.model; - -import cn.escheduler.common.enums.DependentRelation; - -import java.util.List; - -public class DependentTaskModel { - - - private List dependItemList; - private DependentRelation relation; - - public List getDependItemList() { - return dependItemList; - } - - public void setDependItemList(List dependItemList) { - this.dependItemList = dependItemList; - } - - public DependentRelation getRelation() { - return relation; - } - - public void setRelation(DependentRelation relation) { - this.relation = relation; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/model/MasterServer.java b/escheduler-common/src/main/java/cn/escheduler/common/model/MasterServer.java deleted file mode 100644 index bb2f38cb14..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/model/MasterServer.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.model; - - -import java.util.Date; - -/** - * master server - */ -public class MasterServer { - - /** - * id - */ - private int id; - - /** - * host - */ - private String host; - - /** - * port - */ - private int port; - - /** - * master direcotry in zookeeper - */ - private String zkDirectory; - - /** - * resource info: CPU and memory - */ - private String resInfo; - - /** - * create time - */ - private Date createTime; - - /** - * laster heart beat time - */ - private Date lastHeartbeatTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public int getPort() { - return port; - } - - public void setPort(int port) { - this.port = port; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public String getZkDirectory() { - return zkDirectory; - } - - public void setZkDirectory(String zkDirectory) { - this.zkDirectory = zkDirectory; - } - - public Date getLastHeartbeatTime() { - return lastHeartbeatTime; - } - - public void setLastHeartbeatTime(Date lastHeartbeatTime) { - this.lastHeartbeatTime = lastHeartbeatTime; - } - - public String getResInfo() { - return resInfo; - } - - public void setResInfo(String resInfo) { - this.resInfo = resInfo; - } - - @Override - public String toString() { - return "MasterServer{" + - "id=" + id + - ", host='" + host + '\'' + - ", port=" + port + - ", zkDirectory='" + zkDirectory + '\'' + - ", resInfo='" + resInfo + '\'' + - ", createTime=" + createTime + - ", lastHeartbeatTime=" + lastHeartbeatTime + - '}'; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/model/TaskNode.java b/escheduler-common/src/main/java/cn/escheduler/common/model/TaskNode.java deleted file mode 100644 index e09775558c..0000000000 --- 
a/escheduler-common/src/main/java/cn/escheduler/common/model/TaskNode.java +++ /dev/null @@ -1,324 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.model; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.Priority; -import cn.escheduler.common.enums.TaskTimeoutStrategy; -import cn.escheduler.common.task.TaskTimeoutParameter; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.JSONUtils; -import com.alibaba.fastjson.JSONObject; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import org.apache.commons.lang3.StringUtils; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - - -public class TaskNode { - - /** - * task node id - */ - private String id; - - /** - * task node name - */ - private String name; - - /** - * task node description - */ - private String desc; - - /** - * task node type - */ - private String type; - - /** - * the run flag has two states, NORMAL or FORBIDDEN - */ - private String runFlag; - - /** - * the front field - */ - 
private String loc; - - /** - * maximum number of retries - */ - private int maxRetryTimes; - - /** - * Unit of retry interval: points - */ - private int retryInterval; - - /** - * params information - */ - @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) - @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) - private String params; - - /** - * inner dependency information - */ - @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) - @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) - private String preTasks; - - /** - * users store additional information - */ - @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) - @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) - private String extras; - - /** - * node dependency list - */ - private List depList; - - /** - * outer dependency information - */ - @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) - @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) - private String dependence; - - /** - * task instance priority - */ - private Priority taskInstancePriority; - - /** - * worker group id - */ - private int workerGroupId; - - - /** - * task time out - */ - @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) - @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) - private String timeout; - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getDesc() { - return desc; - } - - public void setDesc(String desc) { - this.desc = desc; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public String getParams() { - return params; - } - - public void setParams(String params) { - this.params = params; - } - - public String getPreTasks() { - return preTasks; - } - - public void 
setPreTasks(String preTasks) throws IOException { - this.preTasks = preTasks; - this.depList = JSONUtils.toList(preTasks, String.class); - } - - public String getExtras() { - return extras; - } - - public void setExtras(String extras) { - this.extras = extras; - } - - public List getDepList() { - return depList; - } - - public void setDepList(List depList) throws JsonProcessingException { - this.depList = depList; - this.preTasks = JSONUtils.toJson(depList); - } - - public String getLoc() { - return loc; - } - - public void setLoc(String loc) { - this.loc = loc; - } - - public String getRunFlag(){ - return runFlag; - } - - public void setRunFlag(String runFlag) { - this.runFlag = runFlag; - } - - public Boolean isForbidden(){ - return (StringUtils.isNotEmpty(this.runFlag) && - this.runFlag.equals(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN)); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - TaskNode taskNode = (TaskNode) o; - return Objects.equals(name, taskNode.name) && - Objects.equals(desc, taskNode.desc) && - Objects.equals(type, taskNode.type) && - Objects.equals(params, taskNode.params) && - Objects.equals(preTasks, taskNode.preTasks) && - Objects.equals(extras, taskNode.extras) && - Objects.equals(runFlag, taskNode.runFlag) && - Objects.equals(dependence, taskNode.dependence) && - Objects.equals(workerGroupId, taskNode.workerGroupId) && - CollectionUtils.equalLists(depList, taskNode.depList); - } - - @Override - public int hashCode() { - return Objects.hash(name, desc, type, params, preTasks, extras, depList, runFlag); - } - - public String getDependence() { - return dependence; - } - - public void setDependence(String dependence) { - this.dependence = dependence; - } - - public int getMaxRetryTimes() { - return maxRetryTimes; - } - - public void setMaxRetryTimes(int maxRetryTimes) { - this.maxRetryTimes = maxRetryTimes; - } - - public int 
getRetryInterval() { - return retryInterval; - } - - public void setRetryInterval(int retryInterval) { - this.retryInterval = retryInterval; - } - - public Priority getTaskInstancePriority() { - return taskInstancePriority; - } - - public void setTaskInstancePriority(Priority taskInstancePriority) { - this.taskInstancePriority = taskInstancePriority; - } - - public String getTimeout() { - return timeout; - } - - public void setTimeout(String timeout) { - this.timeout = timeout; - } - - /** - * get task time out parameter - * @return - */ - public TaskTimeoutParameter getTaskTimeoutParameter() { - if(StringUtils.isNotEmpty(this.getTimeout())){ - String formatStr = String.format("%s,%s", TaskTimeoutStrategy.WARN.name(), TaskTimeoutStrategy.FAILED.name()); - String timeout = this.getTimeout().replace(formatStr,TaskTimeoutStrategy.WARNFAILED.name()); - return JSONObject.parseObject(timeout,TaskTimeoutParameter.class); - } - return new TaskTimeoutParameter(false); - } - - @Override - public String toString() { - return "TaskNode{" + - "id='" + id + '\'' + - ", name='" + name + '\'' + - ", desc='" + desc + '\'' + - ", type='" + type + '\'' + - ", runFlag='" + runFlag + '\'' + - ", loc='" + loc + '\'' + - ", maxRetryTimes=" + maxRetryTimes + - ", retryInterval=" + retryInterval + - ", params='" + params + '\'' + - ", preTasks='" + preTasks + '\'' + - ", extras='" + extras + '\'' + - ", depList=" + depList + - ", dependence='" + dependence + '\'' + - ", taskInstancePriority=" + taskInstancePriority + - ", timeout='" + timeout + '\'' + - ", workerGroupId='" + workerGroupId + '\'' + - '}'; - } - - public int getWorkerGroupId() { - return workerGroupId; - } - - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/model/TaskNodeRelation.java b/escheduler-common/src/main/java/cn/escheduler/common/model/TaskNodeRelation.java deleted file mode 100644 index 
979d6d48ee..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/model/TaskNodeRelation.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.model; - -public class TaskNodeRelation { - - /** - * task start node name - */ - private String startNode; - - /** - * task end node name - */ - private String endNode; - - public TaskNodeRelation() { - } - - public TaskNodeRelation(String startNode, String endNode) { - this.startNode = startNode; - this.endNode = endNode; - } - - public String getStartNode() { - return startNode; - } - - public void setStartNode(String startNode) { - this.startNode = startNode; - } - - public String getEndNode() { - return endNode; - } - - public void setEndNode(String endNode) { - this.endNode = endNode; - } - - - public boolean equals(TaskNodeRelation e){ - return (e.getStartNode() == this.startNode && e.getEndNode() == this.endNode); - } - - @Override - public String toString() { - return "TaskNodeRelation{" + - "startNode='" + startNode + '\'' + - ", endNode='" + endNode + '\'' + - '}'; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/process/HttpProperty.java 
b/escheduler-common/src/main/java/cn/escheduler/common/process/HttpProperty.java deleted file mode 100644 index ed7cc46a38..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/process/HttpProperty.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.process; - -import cn.escheduler.common.enums.HttpParametersType; - -import java.util.Objects; - -public class HttpProperty { - /** - * key - */ - private String prop; - - /** - * httpParametersType - */ - private HttpParametersType httpParametersType; - - /** - * value - */ - private String value; - - public HttpProperty() { - } - - public HttpProperty(String prop, HttpParametersType httpParametersType, String value) { - this.prop = prop; - this.httpParametersType = httpParametersType; - this.value = value; - } - - /** - * getter method - * - * @return the prop - * @see HttpProperty#prop - */ - public String getProp() { - return prop; - } - - /** - * setter method - * - * @param prop the prop to set - * @see HttpProperty#prop - */ - public void setProp(String prop) { - this.prop = prop; - } - - /** - * getter method - * - * @return the value - * @see HttpProperty#value - */ - public String getValue() { - return value; - } - - /** - * setter method - * - * @param value the value to set - * @see HttpProperty#value - */ - public void setValue(String value) { - this.value = value; - } - - public HttpParametersType getHttpParametersType() { - return httpParametersType; - } - - public void setHttpParametersType(HttpParametersType httpParametersType) { - this.httpParametersType = httpParametersType; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - HttpProperty property = (HttpProperty) o; - return Objects.equals(prop, property.prop) && - Objects.equals(value, property.value); - } - - - @Override - public int hashCode() { - return Objects.hash(prop, value); - } - - @Override - public String toString() { - return "HttpProperty{" + - "prop='" + prop + '\'' + - ", httpParametersType=" + httpParametersType + - ", value='" + value + '\'' + - '}'; - } - - -} diff --git 
a/escheduler-common/src/main/java/cn/escheduler/common/process/ProcessDag.java b/escheduler-common/src/main/java/cn/escheduler/common/process/ProcessDag.java deleted file mode 100644 index e88f4a052a..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/process/ProcessDag.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.process; - - - -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.model.TaskNodeRelation; - -import java.util.List; - -public class ProcessDag { - - /** - * DAG edge list - **/ - private List edges; - - /** - * DAG node list - */ - private List nodes; - - /** - * getter method - * - * @return the edges - * @see ProcessDag#edges - */ - public List getEdges() { - return edges; - } - - /** - * setter method - * - * @param edges the edges to set - * @see ProcessDag#edges - */ - public void setEdges(List edges) { - this.edges = edges; - } - - /** - * getter method - * - * @return the nodes - * @see ProcessDag#nodes - */ - public List getNodes() { - return nodes; - } - - /** - * setter method - * - * @param nodes the nodes to set - * @see ProcessDag#nodes - */ - public void setNodes(List nodes) { - this.nodes = nodes; - } - - @Override - public String toString() { - return "ProcessDag{" + - "edges=" + edges + - ", nodes=" + nodes + - '}'; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/process/Property.java b/escheduler-common/src/main/java/cn/escheduler/common/process/Property.java deleted file mode 100644 index cf95a5a26c..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/process/Property.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.process; - - -import cn.escheduler.common.enums.DataType; -import cn.escheduler.common.enums.Direct; - -import java.util.Objects; - -public class Property { - /** - * key - */ - private String prop; - - /** - * input/output - */ - private Direct direct; - - /** - * data type - */ - private DataType type; - - /** - * value - */ - private String value; - - public Property() { - } - - public Property(String prop,Direct direct,DataType type,String value) { - this.prop = prop; - this.direct = direct; - this.type = type; - this.value = value; - } - - /** - * getter method - * - * @return the prop - * @see Property#prop - */ - public String getProp() { - return prop; - } - - /** - * setter method - * - * @param prop the prop to set - * @see Property#prop - */ - public void setProp(String prop) { - this.prop = prop; - } - - /** - * getter method - * - * @return the value - * @see Property#value - */ - public String getValue() { - return value; - } - - /** - * setter method - * - * @param value the value to set - * @see Property#value - */ - public void setValue(String value) { - this.value = value; - } - - - public Direct getDirect() { - return direct; - } - - public void setDirect(Direct direct) { - this.direct = direct; - } - - public DataType getType() { - return type; - } - - public void setType(DataType type) { - this.type = type; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - Property property = (Property) 
o; - return Objects.equals(prop, property.prop) && - Objects.equals(value, property.value); - } - - - @Override - public int hashCode() { - return Objects.hash(prop, value); - } - - @Override - public String toString() { - return "Property{" + - "prop='" + prop + '\'' + - ", direct=" + direct + - ", type=" + type + - ", value='" + value + '\'' + - '}'; - } - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/process/ResourceInfo.java b/escheduler-common/src/main/java/cn/escheduler/common/process/ResourceInfo.java deleted file mode 100644 index e6c64756c5..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/process/ResourceInfo.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.process; - -/** - * resource info - */ -public class ResourceInfo { - /** - * res the name of the resource that was uploaded - */ - private String res; - - public String getRes() { - return res; - } - - public void setRes(String res) { - this.res = res; - } - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/queue/ITaskQueue.java b/escheduler-common/src/main/java/cn/escheduler/common/queue/ITaskQueue.java deleted file mode 100644 index 6f6e979797..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/queue/ITaskQueue.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.queue; - -import java.util.List; -import java.util.Set; - -public interface ITaskQueue { - - /** - * take out all the elements - * - * - * @param key - * @return - */ - List getAllTasks(String key); - - /** - * check task exists in the task queue or not - * - * @param key queue name - * @param task ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - * @return true if exists in the queue - */ - boolean checkTaskExists(String key, String task); - - /** - * add an element to the queue - * - * @param key queue name - * @param value - */ - void add(String key, String value); - - /** - * an element pops out of the queue - * - * @param key queue name - * @param n how many elements to poll - * @return - */ - List poll(String key, int n); - - /** - * remove a element from queue - * @param key - * @param value - */ - void removeNode(String key, String value); - - /** - * add an element to the set - * - * @param key - * @param value - */ - void sadd(String key, String value); - - /** - * delete the value corresponding to the key in the set - * - * @param key - * @param value - */ - void srem(String key, String value); - - /** - * gets all the elements of the set based on the key - * - * @param key - * @return - */ - Set smembers(String key); - - - /** - * clear the task queue for use by junit tests only - */ - void delete(); -} \ No newline at end of file diff --git a/escheduler-common/src/main/java/cn/escheduler/common/queue/TaskQueueFactory.java b/escheduler-common/src/main/java/cn/escheduler/common/queue/TaskQueueFactory.java deleted file mode 100644 index 3cc99142eb..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/queue/TaskQueueFactory.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.queue; - -import cn.escheduler.common.utils.CommonUtils; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * task queue factory - */ -public class TaskQueueFactory { - - private static final Logger logger = LoggerFactory.getLogger(TaskQueueFactory.class); - - - private TaskQueueFactory(){ - - } - - - /** - * get instance (singleton) - * - * @return instance - */ - public static ITaskQueue getTaskQueueInstance() { - String queueImplValue = CommonUtils.getQueueImplValue(); - if (StringUtils.isNotBlank(queueImplValue)) { - logger.info("task queue impl use zookeeper "); - return TaskQueueZkImpl.getInstance(); - }else{ - logger.error("property escheduler.queue.impl can't be blank, system will exit "); - System.exit(-1); - } - - return null; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/queue/TaskQueueZkImpl.java b/escheduler-common/src/main/java/cn/escheduler/common/queue/TaskQueueZkImpl.java deleted file mode 100644 index 2febb6ee13..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/queue/TaskQueueZkImpl.java +++ /dev/null @@ -1,439 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license 
agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.queue; - - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Set; -import java.util.TreeSet; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.utils.Bytes; -import cn.escheduler.common.utils.IpUtils; -import cn.escheduler.common.utils.OSUtils; -import cn.escheduler.common.zk.AbstractZKClient; -import org.apache.curator.framework.CuratorFramework; -import org.apache.zookeeper.CreateMode; -import org.apache.zookeeper.data.Stat; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A singleton of a task queue implemented with zookeeper - * tasks queue implemention - */ -public class TaskQueueZkImpl extends AbstractZKClient implements ITaskQueue { - - private static final Logger logger = LoggerFactory.getLogger(TaskQueueZkImpl.class); - - private static volatile TaskQueueZkImpl instance; - - private TaskQueueZkImpl(){ - init(); - } - - public static TaskQueueZkImpl getInstance(){ - if (null == instance) { - synchronized (TaskQueueZkImpl.class) { - if(null == instance) { - instance = new TaskQueueZkImpl(); - } - } - } - return instance; - } - - - /** - * get all tasks from 
tasks queue - * @param key task queue name - * @return - */ - @Override - public List getAllTasks(String key) { - try { - List list = getZkClient().getChildren().forPath(getTasksPath(key)); - - return list; - } catch (Exception e) { - logger.error("get all tasks from tasks queue exception",e); - } - - return new ArrayList(); - } - - /** - * check task exists in the task queue or not - * - * @param key queue name - * @param task ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - * @return true if exists in the queue - */ - @Override - public boolean checkTaskExists(String key, String task) { - String taskPath = getTasksPath(key) + Constants.SINGLE_SLASH + task; - - try { - Stat stat = zkClient.checkExists().forPath(taskPath); - - if(null == stat){ - logger.info("check task:{} not exist in task queue",task); - return false; - }else{ - logger.info("check task {} exists in task queue ",task); - return true; - } - - } catch (Exception e) { - logger.info(String.format("task {} check exists in task queue exception ", task), e); - } - - return false; - } - - - /** - * add task to tasks queue - * - * @param key task queue name - * @param value ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_host1,host2,... - */ - @Override - public void add(String key, String value) { - try { - String taskIdPath = getTasksPath(key) + Constants.SINGLE_SLASH + value; - String result = getZkClient().create().withMode(CreateMode.PERSISTENT).forPath(taskIdPath, Bytes.toBytes(value)); - - logger.info("add task : {} to tasks queue , result success",result); - } catch (Exception e) { - logger.error("add task to tasks queue exception",e); - } - - } - - - /** - * An element pops out of the queue

- * note: - * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_host1,host2,... - * The tasks with the highest priority are selected by comparing the priorities of the above four levels from high to low. - * - * 流程优先级_流程实例id_任务优先级_任务id_任务执行的机器id1,任务执行的机器id2,... high <- low - * @param key task queue name - * @param tasksNum how many elements to poll - * @return the task ids to be executed - */ - @Override - public List poll(String key, int tasksNum) { - try{ - CuratorFramework zk = getZkClient(); - String tasksQueuePath = getTasksPath(key) + Constants.SINGLE_SLASH; - List list = zk.getChildren().forPath(getTasksPath(key)); - - if(list != null && list.size() > 0){ - - String workerIp = OSUtils.getHost(); - String workerIpLongStr = String.valueOf(IpUtils.ipToLong(workerIp)); - - int size = list.size(); - - - Set taskTreeSet = new TreeSet<>(new Comparator() { - @Override - public int compare(String o1, String o2) { - - String s1 = o1; - String s2 = o2; - String[] s1Array = s1.split(Constants.UNDERLINE); - if(s1Array.length>4){ - // warning: if this length > 5, need to be changed - s1 = s1.substring(0, s1.lastIndexOf(Constants.UNDERLINE) ); - } - - String[] s2Array = s2.split(Constants.UNDERLINE); - if(s2Array.length>4){ - // warning: if this length > 5, need to be changed - s2 = s2.substring(0, s2.lastIndexOf(Constants.UNDERLINE) ); - } - - return s1.compareTo(s2); - } - }); - - for (int i = 0; i < size; i++) { - - String taskDetail = list.get(i); - String[] taskDetailArrs = taskDetail.split(Constants.UNDERLINE); - - //forward compatibility 向前版本兼容 - if(taskDetailArrs.length >= 4){ - - //format ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - String formatTask = String.format("%s_%010d_%s_%010d", taskDetailArrs[0], Long.parseLong(taskDetailArrs[1]), taskDetailArrs[2], Long.parseLong(taskDetailArrs[3])); - if(taskDetailArrs.length > 4){ - String taskHosts = taskDetailArrs[4]; - - //task can assign to any 
worker host if equals default ip value of worker server - if(!taskHosts.equals(String.valueOf(Constants.DEFAULT_WORKER_ID))){ - String[] taskHostsArr = taskHosts.split(Constants.COMMA); - if(!Arrays.asList(taskHostsArr).contains(workerIpLongStr)){ - continue; - } - } - formatTask += Constants.UNDERLINE + taskDetailArrs[4]; - } - taskTreeSet.add(formatTask); - - } - - } - - List taskslist = getTasksListFromTreeSet(tasksNum, taskTreeSet); - - logger.info("consume tasks: {},there still have {} tasks need to be executed", Arrays.toString(taskslist.toArray()), size - taskslist.size()); - - return taskslist; - }else{ - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - } - - } catch (Exception e) { - logger.error("add task to tasks queue exception",e); - } - return new ArrayList(); - } - - - /** - * get task list from tree set - * - * @param tasksNum - * @param taskTreeSet - */ - public List getTasksListFromTreeSet(int tasksNum, Set taskTreeSet) { - Iterator iterator = taskTreeSet.iterator(); - int j = 0; - List taskslist = new ArrayList<>(tasksNum); - while(iterator.hasNext()){ - if(j++ >= tasksNum){ - break; - } - String task = iterator.next(); - taskslist.add(getOriginTaskFormat(task)); - } - return taskslist; - } - - /** - * format ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - * processInstanceId and task id need to be convert to int. 
- * @param formatTask - * @return - */ - private String getOriginTaskFormat(String formatTask){ - String[] taskArray = formatTask.split(Constants.UNDERLINE); - if(taskArray.length< 4){ - return formatTask; - } - int processInstanceId = Integer.parseInt(taskArray[1]); - int taskId = Integer.parseInt(taskArray[3]); - - StringBuilder sb = new StringBuilder(50); - String destTask = String.format("%s_%s_%s_%s", taskArray[0], processInstanceId, taskArray[2], taskId); - - sb.append(destTask); - - if(taskArray.length > 4){ - for(int index = 4; index < taskArray.length; index++){ - sb.append(Constants.UNDERLINE).append(taskArray[index]); - } - } - return sb.toString(); - } - - @Override - public void removeNode(String key, String nodeValue){ - - CuratorFramework zk = getZkClient(); - String tasksQueuePath = getTasksPath(key) + Constants.SINGLE_SLASH; - String taskIdPath = tasksQueuePath + nodeValue; - logger.info("consume task {}", taskIdPath); - try{ - Stat stat = zk.checkExists().forPath(taskIdPath); - if(stat != null){ - zk.delete().forPath(taskIdPath); - } - }catch(Exception e){ - logger.error(String.format("delete task:%s from zookeeper fail, exception:" ,nodeValue) ,e); - } - - } - - - - /** - * In order to be compatible with redis implementation - * - * To be compatible with the redis implementation, add an element to the set - * @param key The key is the kill/cancel queue path name - * @param value host-taskId The name of the zookeeper node - */ - @Override - public void sadd(String key,String value) { - try { - - if(value != null && value.trim().length() > 0){ - String path = getTasksPath(key) + Constants.SINGLE_SLASH; - CuratorFramework zk = getZkClient(); - Stat stat = zk.checkExists().forPath(path + value); - - if(null == stat){ - String result = zk.create().withMode(CreateMode.PERSISTENT).forPath(path + value,Bytes.toBytes(value)); - logger.info("add task:{} to tasks set result:{} ",value,result); - }else{ - logger.info("task {} exists in tasks set ",value); - 
} - - }else{ - logger.warn("add host-taskId:{} to tasks set is empty ",value); - } - - } catch (Exception e) { - logger.error("add task to tasks set exception",e); - } - } - - - /** - * delete the value corresponding to the key in the set - * @param key The key is the kill/cancel queue path name - * @param value host-taskId-taskType The name of the zookeeper node - */ - @Override - public void srem(String key, String value) { - try{ - String path = getTasksPath(key) + Constants.SINGLE_SLASH; - CuratorFramework zk = getZkClient(); - Stat stat = zk.checkExists().forPath(path + value); - - if(null != stat){ - zk.delete().forPath(path + value); - logger.info("delete task:{} from tasks set ",value); - }else{ - logger.info("delete task:{} from tasks set fail, there is no this task",value); - } - - }catch(Exception e){ - logger.error(String.format("delete task:" + value + " exception"),e); - } - } - - - /** - * Gets all the elements of the set based on the key - * @param key The key is the kill/cancel queue path name - * @return - */ - @Override - public Set smembers(String key) { - - Set tasksSet = new HashSet<>(); - - try { - List list = getZkClient().getChildren().forPath(getTasksPath(key)); - - for (String task : list) { - tasksSet.add(task); - } - - return tasksSet; - } catch (Exception e) { - logger.error("get all tasks from tasks queue exception",e); - } - - return tasksSet; - } - - - - /** - * Init the task queue of zookeeper node - */ - private void init(){ - try { - String tasksQueuePath = getTasksPath(Constants.SCHEDULER_TASKS_QUEUE); - String tasksCancelPath = getTasksPath(Constants.SCHEDULER_TASKS_KILL); - - for(String taskQueuePath : new String[]{tasksQueuePath,tasksCancelPath}){ - if(zkClient.checkExists().forPath(taskQueuePath) == null){ - // create a persistent parent node - zkClient.create().creatingParentContainersIfNeeded() - .withMode(CreateMode.PERSISTENT).forPath(taskQueuePath); - logger.info("create tasks queue parent node success : {} 
",taskQueuePath); - } - } - - } catch (Exception e) { - logger.error("create zk node failure",e); - } - } - - - /** - * Clear the task queue of zookeeper node - */ - @Override - public void delete(){ - try { - String tasksQueuePath = getTasksPath(Constants.SCHEDULER_TASKS_QUEUE); - String tasksCancelPath = getTasksPath(Constants.SCHEDULER_TASKS_KILL); - - for(String taskQueuePath : new String[]{tasksQueuePath,tasksCancelPath}){ - if(zkClient.checkExists().forPath(taskQueuePath) != null){ - - List list = zkClient.getChildren().forPath(taskQueuePath); - - for (String task : list) { - zkClient.delete().forPath(taskQueuePath + Constants.SINGLE_SLASH + task); - logger.info("delete task from tasks queue : {}/{} ",taskQueuePath,task); - - } - - } - } - - } catch (Exception e) { - logger.error("delete all tasks in tasks queue failure",e); - } - } - - /** - * Get the task queue path - * @param key task queue name - * @return - */ - public String getTasksPath(String key){ - return conf.getString(Constants.ZOOKEEPER_SCHEDULER_ROOT) + Constants.SINGLE_SLASH + key; - } - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/shell/AbstractShell.java b/escheduler-common/src/main/java/cn/escheduler/common/shell/AbstractShell.java deleted file mode 100644 index 0880c4d5bb..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/shell/AbstractShell.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.shell; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.io.InputStreamReader; -import java.util.Map; -import java.util.Set; -import java.util.Timer; -import java.util.TimerTask; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicBoolean; - - -/** - * A base class for running a Unix command. - * - * AbstractShell can be used to run unix commands like du or - * df. It also offers facilities to gate commands by - * time-intervals. - */ -public abstract class AbstractShell { - - private static final Logger logger = LoggerFactory.getLogger(AbstractShell.class); - - - - /** - * Time after which the executing script would be timedout - */ - protected long timeOutInterval = 0L; - /** - * If or not script timed out - */ - private AtomicBoolean timedOut; - - /** - * refresh interval in msec - */ - private long interval; - - /** - * last time the command was performed - */ - private long lastTime; - - /** - * env for the command execution - */ - private Map environment; - private File dir; - - /** - * sub process used to execute the command - */ - private Process process; - private int exitCode; - - /** - * If or not script finished executing - */ - private volatile AtomicBoolean completed; - - public AbstractShell() { - this(0L); - } - - /** - * @param interval the minimum duration to wait before re-executing the - * command. 
- */ - public AbstractShell(long interval ) { - this.interval = interval; - this.lastTime = (interval<0) ? 0 : -interval; - } - - - - /** - * set the environment for the command - * @param env Mapping of environment variables - */ - protected void setEnvironment(Map env) { - this.environment = env; - } - - /** - * set the working directory - * @param dir The directory where the command would be executed - */ - protected void setWorkingDirectory(File dir) { - this.dir = dir; - } - - /** - * check to see if a command needs to be executed and execute if needed - */ - protected void run() throws IOException { - if (lastTime + interval > System.currentTimeMillis()) { - return; - } - // reset for next run - exitCode = 0; - runCommand(); - } - - - /** - * Run a command actual work - */ - private void runCommand() throws IOException { - ProcessBuilder builder = new ProcessBuilder(getExecString()); - Timer timeOutTimer = null; - ShellTimeoutTimerTask timeoutTimerTask = null; - timedOut = new AtomicBoolean(false); - completed = new AtomicBoolean(false); - - if (environment != null) { - builder.environment().putAll(this.environment); - } - if (dir != null) { - builder.directory(this.dir); - } - - process = builder.start(); - ProcessContainer.putProcess(process); - - if (timeOutInterval > 0) { - timeOutTimer = new Timer(); - timeoutTimerTask = new ShellTimeoutTimerTask( - this); - //One time scheduling. 
- timeOutTimer.schedule(timeoutTimerTask, timeOutInterval); - } - final BufferedReader errReader = - new BufferedReader(new InputStreamReader(process - .getErrorStream())); - BufferedReader inReader = - new BufferedReader(new InputStreamReader(process - .getInputStream())); - final StringBuilder errMsg = new StringBuilder(); - - // read error and input streams as this would free up the buffers - // free the error stream buffer - Thread errThread = new Thread() { - @Override - public void run() { - try { - String line = errReader.readLine(); - while((line != null) && !isInterrupted()) { - errMsg.append(line); - errMsg.append(System.getProperty("line.separator")); - line = errReader.readLine(); - } - } catch(IOException ioe) { - logger.warn("Error reading the error stream", ioe); - } - } - }; - try { - errThread.start(); - } catch (IllegalStateException ise) { } - try { - // parse the output - parseExecResult(inReader); - exitCode = process.waitFor(); - try { - // make sure that the error thread exits - errThread.join(); - } catch (InterruptedException ie) { - logger.warn("Interrupted while reading the error stream", ie); - } - completed.set(true); - //the timeout thread handling - //taken care in finally block - if (exitCode != 0) { - throw new ExitCodeException(exitCode, errMsg.toString()); - } - } catch (InterruptedException ie) { - throw new IOException(ie.toString()); - } finally { - if ((timeOutTimer!=null) && !timedOut.get()) { - timeOutTimer.cancel(); - } - // close the input stream - try { - inReader.close(); - } catch (IOException ioe) { - logger.warn("Error while closing the input stream", ioe); - } - if (!completed.get()) { - errThread.interrupt(); - } - try { - errReader.close(); - } catch (IOException ioe) { - logger.warn("Error while closing the error stream", ioe); - } - ProcessContainer.removeProcess(process); - process.destroy(); - lastTime = System.currentTimeMillis(); - } - } - - /** - * return an array containing the command name & its parameters 
- * */ - protected abstract String[] getExecString(); - - /** - * Parse the execution result - * */ - protected abstract void parseExecResult(BufferedReader lines) - throws IOException; - - /** - * get the current sub-process executing the given command - * @return process executing the command - */ - public Process getProcess() { - return process; - } - - /** get the exit code - * @return the exit code of the process - */ - public int getExitCode() { - return exitCode; - } - - /** - * Set if the command has timed out. - * - */ - private void setTimedOut() { - this.timedOut.set(true); - } - - - - /** - * Timer which is used to timeout scripts spawned off by shell. - */ - private static class ShellTimeoutTimerTask extends TimerTask { - - private AbstractShell shell; - - public ShellTimeoutTimerTask(AbstractShell shell) { - this.shell = shell; - } - - @Override - public void run() { - Process p = shell.getProcess(); - try { - p.exitValue(); - } catch (Exception e) { - //Process has not terminated. - //So check if it has completed - //if not just destroy it. - if (p != null && !shell.completed.get()) { - shell.setTimedOut(); - p.destroy(); - } - } - } - } - - /** - * This is an IOException with exit code added. 
- */ - public static class ExitCodeException extends IOException { - int exitCode; - - public ExitCodeException(int exitCode, String message) { - super(message); - this.exitCode = exitCode; - } - - public int getExitCode() { - return exitCode; - } - } - - /** - * process manage container - * - */ - public static class ProcessContainer extends ConcurrentHashMap{ - private static final ProcessContainer container = new ProcessContainer(); - private ProcessContainer(){ - super(); - } - public static final ProcessContainer getInstance(){ - return container; - } - - public static void putProcess(Process process){ - getInstance().put(process.hashCode(), process); - } - public static int processSize(){ - return getInstance().size(); - } - - public static void removeProcess(Process process){ - getInstance().remove(process.hashCode()); - } - - public static void destroyAllProcess(){ - Set> set = getInstance().entrySet(); - for (Entry entry : set) { - try{ - entry.getValue().destroy(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - logger.info("close " + set.size() + " executing process tasks"); - } - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/shell/ShellExecutor.java b/escheduler-common/src/main/java/cn/escheduler/common/shell/ShellExecutor.java deleted file mode 100644 index 1a23f9655e..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/shell/ShellExecutor.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.shell; - -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.util.Map; - -/** - * shell command executor. - * - * ShellExecutor should be used in cases where the output - * of the command needs no explicit parsing and where the command, working - * directory and the environment remains unchanged. The output of the command - * is stored as-is and is expected to be small. - */ -public class ShellExecutor extends AbstractShell { - - private String[] command; - private StringBuffer output; - - - public ShellExecutor(String... execString) { - this(execString, null); - } - - public ShellExecutor(String[] execString, File dir) { - this(execString, dir, null); - } - - public ShellExecutor(String[] execString, File dir, - Map env) { - this(execString, dir, env , 0L); - } - - /** - * Create a new instance of the ShellExecutor to execute a command. - * - * @param execString The command to execute with arguments - * @param dir If not-null, specifies the directory which should be set - * as the current working directory for the command. - * If null, the current working directory is not modified. - * @param env If not-null, environment of the command will include the - * key-value pairs specified in the map. If null, the current - * environment is not modified. - * @param timeout Specifies the time in milliseconds, after which the - * command will be killed and the status marked as timedout. - * If 0, the command will not be timed out. 
- */ - public ShellExecutor(String[] execString, File dir, - Map env, long timeout) { - command = execString.clone(); - if (dir != null) { - setWorkingDirectory(dir); - } - if (env != null) { - setEnvironment(env); - } - timeOutInterval = timeout; - } - - - /** - * Static method to execute a shell command. - * Covers most of the simple cases without requiring the user to implement - * the AbstractShell interface. - * @param cmd shell command to execute. - * @return the output of the executed command. - */ - public static String execCommand(String... cmd) throws IOException { - return execCommand(null, cmd, 0L); - } - - /** - * Static method to execute a shell command. - * Covers most of the simple cases without requiring the user to implement - * the AbstractShell interface. - * @param env the map of environment key=value - * @param cmd shell command to execute. - * @param timeout time in milliseconds after which script should be marked timeout - * @return the output of the executed command.o - */ - - public static String execCommand(Map env, String[] cmd, - long timeout) throws IOException { - ShellExecutor exec = new ShellExecutor(cmd, null, env, - timeout); - exec.execute(); - return exec.getOutput(); - } - - /** - * Static method to execute a shell command. - * Covers most of the simple cases without requiring the user to implement - * the AbstractShell interface. - * @param env the map of environment key=value - * @param cmd shell command to execute. - * @return the output of the executed command. - */ - public static String execCommand(Map env, String ... 
cmd) - throws IOException { - return execCommand(env, cmd, 0L); - } - - /** - * Execute the shell command - * - */ - public void execute() throws IOException { - this.run(); - } - - @Override - protected String[] getExecString() { - return command; - } - - @Override - protected void parseExecResult(BufferedReader lines) throws IOException { - output = new StringBuffer(); - char[] buf = new char[1024]; - int nRead; - String line = ""; - while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) { - line = new String(buf,0,nRead); - } - output.append(line); - } - - /** - * - * Get the output of the shell command - */ - public String getOutput() { - return (output == null) ? "" : output.toString(); - } - - - /** - * Returns the commands of this instance. - * Arguments with spaces in are presented with quotes round; other - * arguments are presented raw - * - * @return a string representation of the object - */ - @Override - public String toString() { - StringBuilder builder = new StringBuilder(); - String[] args = getExecString(); - for (String s : args) { - if (s.indexOf(' ') >= 0) { - builder.append('"').append(s).append('"'); - } else { - builder.append(s); - } - builder.append(' '); - } - return builder.toString(); - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/AbstractParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/AbstractParameters.java deleted file mode 100644 index 596bfcc87e..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/AbstractParameters.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.task; - -import cn.escheduler.common.process.Property; - -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -/** - * job params related class - */ -public abstract class AbstractParameters implements IParameters { - - @Override - public abstract boolean checkParameters(); - - @Override - public abstract List getResourceFilesList(); - - /** - * local parameters - */ - public List localParams; - - /** - * get local parameters list - * @return - */ - public List getLocalParams() { - return localParams; - } - - public void setLocalParams(List localParams) { - this.localParams = localParams; - } - - /** - * get local parameters map - * @return - */ - public Map getLocalParametersMap() { - if (localParams != null) { - Map localParametersMaps = new LinkedHashMap<>(); - - for (Property property : localParams) { - localParametersMaps.put(property.getProp(),property); - } - return localParametersMaps; - } - return null; - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/IParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/IParameters.java deleted file mode 100644 index c4dbd6d421..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/IParameters.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.task; - -import java.util.List; - -/** - * job params interface - */ -public interface IParameters { - /** - * check parameters is valid - * - * @return - */ - boolean checkParameters(); - - /** - * get project resource files list - * - * @return resource files list - */ - List getResourceFilesList(); -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/TaskTimeoutParameter.java b/escheduler-common/src/main/java/cn/escheduler/common/task/TaskTimeoutParameter.java deleted file mode 100644 index a2a2fb5f8c..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/TaskTimeoutParameter.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.task; - -import cn.escheduler.common.enums.TaskTimeoutStrategy; - -/** - * task timeout parameter - */ -public class TaskTimeoutParameter { - - private boolean enable; - /** - * task timeout strategy - */ - private TaskTimeoutStrategy strategy; - /** - * task timeout interval - */ - private int interval; - - public boolean getEnable() { - return enable; - } - - public void setEnable(boolean enable) { - this.enable = enable; - } - - public TaskTimeoutStrategy getStrategy() { - return strategy; - } - - public void setStrategy(TaskTimeoutStrategy strategy) { - this.strategy = strategy; - } - - public int getInterval() { - return interval; - } - - public void setInterval(int interval) { - this.interval = interval; - } - - public TaskTimeoutParameter() { - } - - public TaskTimeoutParameter(boolean enable) { - this.enable = enable; - } - - public TaskTimeoutParameter(boolean enable, TaskTimeoutStrategy strategy, int interval) { - this.enable = enable; - this.strategy = strategy; - this.interval = interval; - } - - @Override - public String toString() { - return "TaskTimeoutParameter{" + - "enable=" + enable + - ", strategy=" + strategy + - ", interval=" + interval + - '}'; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/dependent/DependentParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/dependent/DependentParameters.java deleted file mode 100644 index 87143cdfb8..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/dependent/DependentParameters.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.task.dependent; - -import cn.escheduler.common.enums.DependentRelation; -import cn.escheduler.common.model.DependentTaskModel; -import cn.escheduler.common.task.AbstractParameters; - -import java.util.ArrayList; -import java.util.List; - -public class DependentParameters extends AbstractParameters { - - private List dependTaskList; - private DependentRelation relation; - - - - @Override - public boolean checkParameters() { - return true; - } - - @Override - public List getResourceFilesList() { - return new ArrayList<>(); - } - - public List getDependTaskList() { - return dependTaskList; - } - - public void setDependTaskList(List dependTaskList) { - this.dependTaskList = dependTaskList; - } - - public DependentRelation getRelation() { - return relation; - } - - public void setRelation(DependentRelation relation) { - this.relation = relation; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/flink/FlinkParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/flink/FlinkParameters.java deleted file mode 100644 index 54dfcb7103..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/flink/FlinkParameters.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.task.flink; - -import cn.escheduler.common.enums.ProgramType; -import cn.escheduler.common.process.ResourceInfo; -import cn.escheduler.common.task.AbstractParameters; - -import java.util.List; -import java.util.stream.Collectors; - -/** - * spark parameters - */ -public class FlinkParameters extends AbstractParameters { - - /** - * major jar - */ - private ResourceInfo mainJar; - - /** - * major class - */ - private String mainClass; - - /** - * deploy mode yarn-cluster yarn-client yarn-local - */ - private String deployMode; - - /** - * arguments - */ - private String mainArgs; - - /** - * slot个数 - */ - private int slot; - - /** - *Yarn application的名字 - */ - - private String appName; - - /** - * taskManager 数量 - */ - private int taskManager; - - /** - * jobManagerMemory 内存大小 - */ - private String jobManagerMemory ; - - /** - * taskManagerMemory内存大小 - */ - private String taskManagerMemory; - - /** - * resource list - */ - private List resourceList; - - /** - * The YARN queue to submit to - */ - private String queue; - - /** - * other arguments - */ - private String others; - - /** - * program type - * 0 JAVA,1 SCALA,2 PYTHON - */ - private ProgramType programType; - - public ResourceInfo getMainJar() { - return mainJar; - } - - public void 
setMainJar(ResourceInfo mainJar) { - this.mainJar = mainJar; - } - - public String getMainClass() { - return mainClass; - } - - public void setMainClass(String mainClass) { - this.mainClass = mainClass; - } - - public String getDeployMode() { - return deployMode; - } - - public void setDeployMode(String deployMode) { - this.deployMode = deployMode; - } - - public String getMainArgs() { - return mainArgs; - } - - public void setMainArgs(String mainArgs) { - this.mainArgs = mainArgs; - } - - public int getSlot() { - return slot; - } - - public void setSlot(int slot) { - this.slot = slot; - } - - public String getAppName() { - return appName; - } - - public void setAppName(String appName) { - this.appName = appName; - } - - public int getTaskManager() { - return taskManager; - } - - public void setTaskManager(int taskManager) { - this.taskManager = taskManager; - } - - public String getJobManagerMemory() { - return jobManagerMemory; - } - - public void setJobManagerMemory(String jobManagerMemory) { - this.jobManagerMemory = jobManagerMemory; - } - - public String getTaskManagerMemory() { - return taskManagerMemory; - } - - public void setTaskManagerMemory(String taskManagerMemory) { - this.taskManagerMemory = taskManagerMemory; - } - - public String getQueue() { - return queue; - } - - public void setQueue(String queue) { - this.queue = queue; - } - - public List getResourceList() { - return resourceList; - } - - public void setResourceList(List resourceList) { - this.resourceList = resourceList; - } - - public String getOthers() { - return others; - } - - public void setOthers(String others) { - this.others = others; - } - - public ProgramType getProgramType() { - return programType; - } - - public void setProgramType(ProgramType programType) { - this.programType = programType; - } - - @Override - public boolean checkParameters() { - return mainJar != null && programType != null; - } - - - @Override - public List getResourceFilesList() { - if(resourceList !=null ) { 
- this.resourceList.add(mainJar); - return resourceList.stream() - .map(p -> p.getRes()).collect(Collectors.toList()); - } - return null; - } - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/http/HttpParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/http/HttpParameters.java deleted file mode 100644 index dc23f738b8..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/http/HttpParameters.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.task.http; - -import cn.escheduler.common.enums.HttpCheckCondition; -import cn.escheduler.common.enums.HttpMethod; -import cn.escheduler.common.process.HttpProperty; -import cn.escheduler.common.task.AbstractParameters; -import org.apache.commons.lang.StringUtils; - -import java.util.ArrayList; -import java.util.List; - -/** - * http parameter - */ -public class HttpParameters extends AbstractParameters { - /** - * url - */ - private String url; - - /** - * httpMethod - */ - private HttpMethod httpMethod; - - /** - * http params - */ - private List httpParams; - - /** - * httpCheckCondition - */ - private HttpCheckCondition httpCheckCondition = HttpCheckCondition.STATUS_CODE_DEFAULT; - - /** - * condition - */ - private String condition; - - - - @Override - public boolean checkParameters() { - return StringUtils.isNotEmpty(url); - } - - @Override - public List getResourceFilesList() { - return new ArrayList<>(); - } - - public String getUrl() { - return url; - } - - public void setUrl(String url) { - this.url = url; - } - - public HttpMethod getHttpMethod() { - return httpMethod; - } - - public void setHttpMethod(HttpMethod httpMethod) { - this.httpMethod = httpMethod; - } - - public List getHttpParams() { - return httpParams; - } - - public void setHttpParams(List httpParams) { - this.httpParams = httpParams; - } - - public HttpCheckCondition getHttpCheckCondition() { - return httpCheckCondition; - } - - public void setHttpCheckCondition(HttpCheckCondition httpCheckCondition) { - this.httpCheckCondition = httpCheckCondition; - } - - public String getCondition() { - return condition; - } - - public void setCondition(String condition) { - this.condition = condition; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/mr/MapreduceParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/mr/MapreduceParameters.java deleted file mode 100644 index 1839e93929..0000000000 --- 
a/escheduler-common/src/main/java/cn/escheduler/common/task/mr/MapreduceParameters.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.task.mr; - -import cn.escheduler.common.enums.ProgramType; -import cn.escheduler.common.process.ResourceInfo; -import cn.escheduler.common.task.AbstractParameters; - -import java.util.List; -import java.util.stream.Collectors; - -public class MapreduceParameters extends AbstractParameters { - - /** - * major jar - */ - private ResourceInfo mainJar; - - /** - * major class - */ - private String mainClass; - - /** - * arguments - */ - private String mainArgs; - - /** - * other arguments - */ - private String others; - - /** - * queue - */ - private String queue; - - /** - * resource list - */ - private List resourceList; - - /** - * program type - * 0 JAVA,1 SCALA,2 PYTHON - */ - private ProgramType programType; - - - public String getMainClass() { - return mainClass; - } - - public void setMainClass(String mainClass) { - this.mainClass = mainClass; - } - - public String getMainArgs() { - return mainArgs; - } - - public void setMainArgs(String mainArgs) { - this.mainArgs = mainArgs; - } - - public String getOthers() { - 
return others; - } - - public void setOthers(String others) { - this.others = others; - } - - public String getQueue() { - return queue; - } - - public void setQueue(String queue) { - this.queue = queue; - } - - public List getResourceList() { - return this.resourceList; - } - - public void setResourceList(List resourceList) { - this.resourceList = resourceList; - } - - public void setMainJar(ResourceInfo mainJar) { - this.mainJar = mainJar; - } - - public ResourceInfo getMainJar() { - return mainJar; - } - - public ProgramType getProgramType() { - return programType; - } - - public void setProgramType(ProgramType programType) { - this.programType = programType; - } - - @Override - public boolean checkParameters() { - return this.mainJar != null && this.programType != null; - } - - @Override - public List getResourceFilesList() { - if (resourceList != null) { - this.resourceList.add(mainJar); - return resourceList.stream() - .map(p -> p.getRes()).collect(Collectors.toList()); - } - return null; - } - - @Override - public String toString() { - return "mainJar= " + mainJar - + "mainClass=" + mainClass - + "mainArgs=" + mainArgs - + "queue=" + queue - + "other mainArgs=" + others - ; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/procedure/ProcedureParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/procedure/ProcedureParameters.java deleted file mode 100644 index 5e84051929..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/procedure/ProcedureParameters.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.task.procedure; - -import cn.escheduler.common.task.AbstractParameters; -import org.apache.commons.lang.StringUtils; - -import java.util.ArrayList; -import java.util.List; - - -/** - * procedure parameter - */ -public class ProcedureParameters extends AbstractParameters { - - /** - * data source type,eg MYSQL, POSTGRES, HIVE ... - */ - private String type; - - /** - * data source id - */ - private int datasource; - - /** - * procedure name - */ - private String method; - - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public int getDatasource() { - return datasource; - } - - public void setDatasource(int datasource) { - this.datasource = datasource; - } - - public String getMethod() { - return method; - } - - public void setMethod(String method) { - this.method = method; - } - - @Override - public boolean checkParameters() { - return datasource != 0 && StringUtils.isNotEmpty(type) && StringUtils.isNotEmpty(method); - } - - @Override - public List getResourceFilesList() { - return new ArrayList<>(); - } - - @Override - public String toString() { - return "ProcessdureParam{" + - "type='" + type + '\'' + - ", datasource=" + datasource + - ", method='" + method + '\'' + - '}'; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/python/PythonParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/python/PythonParameters.java deleted file mode 100644 index 601b07b717..0000000000 --- 
a/escheduler-common/src/main/java/cn/escheduler/common/task/python/PythonParameters.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.task.python; - - -import cn.escheduler.common.process.ResourceInfo; -import cn.escheduler.common.task.AbstractParameters; - -import java.util.List; -import java.util.stream.Collectors; - -public class PythonParameters extends AbstractParameters { - /** - * origin python script - */ - private String rawScript; - - /** - * resource list - */ - private List resourceList; - - public String getRawScript() { - return rawScript; - } - - public void setRawScript(String rawScript) { - this.rawScript = rawScript; - } - - public List getResourceList() { - return resourceList; - } - - public void setResourceList(List resourceList) { - this.resourceList = resourceList; - } - - @Override - public boolean checkParameters() { - return rawScript != null && !rawScript.isEmpty(); - } - - @Override - public List getResourceFilesList() { - if (resourceList != null) { - return resourceList.stream() - .map(p -> p.getRes()).collect(Collectors.toList()); - } - - return null; - } -} diff --git 
a/escheduler-common/src/main/java/cn/escheduler/common/task/shell/ShellParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/shell/ShellParameters.java deleted file mode 100644 index 00fbcc114f..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/shell/ShellParameters.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.task.shell; - - -import cn.escheduler.common.process.ResourceInfo; -import cn.escheduler.common.task.AbstractParameters; - -import java.util.List; -import java.util.stream.Collectors; - -/** - * shell parameters - */ -public class ShellParameters extends AbstractParameters { - /** - * shell script - */ - private String rawScript; - - /** - * resource list - */ - private List resourceList; - - public String getRawScript() { - return rawScript; - } - - public void setRawScript(String rawScript) { - this.rawScript = rawScript; - } - - public List getResourceList() { - return resourceList; - } - - public void setResourceList(List resourceList) { - this.resourceList = resourceList; - } - - @Override - public boolean checkParameters() { - return rawScript != null && !rawScript.isEmpty(); - } - - @Override - public List getResourceFilesList() { - if (resourceList != null) { - return resourceList.stream() - .map(p -> p.getRes()).collect(Collectors.toList()); - } - - return null; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/spark/SparkParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/spark/SparkParameters.java deleted file mode 100644 index 087492b1f2..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/spark/SparkParameters.java +++ /dev/null @@ -1,220 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.task.spark; - -import cn.escheduler.common.enums.ProgramType; -import cn.escheduler.common.process.ResourceInfo; -import cn.escheduler.common.task.AbstractParameters; - -import java.util.List; -import java.util.stream.Collectors; - -/** - * spark parameters - */ -public class SparkParameters extends AbstractParameters { - - /** - * major jar - */ - private ResourceInfo mainJar; - - /** - * major class - */ - private String mainClass; - - /** - * deploy mode - */ - private String deployMode; - - /** - * arguments - */ - private String mainArgs; - - /** - * driver-cores Number of cores used by the driver, only in cluster mode - */ - private int driverCores; - - /** - * driver-memory Memory for driver - */ - - private String driverMemory; - - /** - * num-executors Number of executors to launch - */ - private int numExecutors; - - /** - * executor-cores Number of cores per executor - */ - private int executorCores; - - /** - * Memory per executor - */ - private String executorMemory; - - /** - * resource list - */ - private List resourceList; - - /** - * The YARN queue to submit to - */ - private String queue; - - /** - * other arguments - */ - private String others; - - /** - * program type - * 0 JAVA,1 SCALA,2 PYTHON - */ - private ProgramType programType; - - public ResourceInfo getMainJar() { - return mainJar; - } - - public void setMainJar(ResourceInfo mainJar) { - this.mainJar = mainJar; - } - - public String getMainClass() { - return mainClass; - } - - public void setMainClass(String mainClass) { - this.mainClass = 
mainClass; - } - - public String getDeployMode() { - return deployMode; - } - - public void setDeployMode(String deployMode) { - this.deployMode = deployMode; - } - - public String getMainArgs() { - return mainArgs; - } - - public void setMainArgs(String mainArgs) { - this.mainArgs = mainArgs; - } - - public int getDriverCores() { - return driverCores; - } - - public void setDriverCores(int driverCores) { - this.driverCores = driverCores; - } - - public String getDriverMemory() { - return driverMemory; - } - - public void setDriverMemory(String driverMemory) { - this.driverMemory = driverMemory; - } - - public int getNumExecutors() { - return numExecutors; - } - - public void setNumExecutors(int numExecutors) { - this.numExecutors = numExecutors; - } - - public int getExecutorCores() { - return executorCores; - } - - public void setExecutorCores(int executorCores) { - this.executorCores = executorCores; - } - - public String getExecutorMemory() { - return executorMemory; - } - - public void setExecutorMemory(String executorMemory) { - this.executorMemory = executorMemory; - } - - - public String getQueue() { - return queue; - } - - public void setQueue(String queue) { - this.queue = queue; - } - - public List getResourceList() { - return resourceList; - } - - public void setResourceList(List resourceList) { - this.resourceList = resourceList; - } - - public String getOthers() { - return others; - } - - public void setOthers(String others) { - this.others = others; - } - - public ProgramType getProgramType() { - return programType; - } - - public void setProgramType(ProgramType programType) { - this.programType = programType; - } - - @Override - public boolean checkParameters() { - return mainJar != null && programType != null; - } - - - @Override - public List getResourceFilesList() { - if(resourceList !=null ) { - this.resourceList.add(mainJar); - return resourceList.stream() - .map(p -> p.getRes()).collect(Collectors.toList()); - } - return null; - } - - -} diff 
--git a/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlBinds.java b/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlBinds.java deleted file mode 100644 index 50975df3ed..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlBinds.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.task.sql; - -import cn.escheduler.common.process.Property; - -import java.util.Map; - -/** - * Used to contains both prepared sql string and its to-be-bind parameters - */ -public class SqlBinds { - private final String sql; - private final Map paramsMap; - - public SqlBinds(String sql, Map paramsMap) { - this.sql = sql; - this.paramsMap = paramsMap; - } - - public String getSql() { - return sql; - } - - public Map getParamsMap() { - return paramsMap; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlParameters.java deleted file mode 100644 index 0dc54b4c70..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlParameters.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.task.sql; - -import cn.escheduler.common.task.AbstractParameters; -import org.apache.commons.lang.StringUtils; - -import java.util.ArrayList; -import java.util.List; - -/** - * Sql/Hql parameter - */ -public class SqlParameters extends AbstractParameters { - /** - * data source type,eg MYSQL, POSTGRES, HIVE ... - */ - private String type; - - /** - * datasource id - */ - private int datasource; - - /** - * sql - */ - private String sql; - - /** - * sql type - * 0 query - * 1 NON_QUERY - */ - private int sqlType; - - /** - * udf list - */ - private String udfs; - /** - * show type - * 0 TABLE - * 1 TEXT - * 2 attachment - * 3 TABLE+attachment - */ - private String showType; - /** - * SQL connection parameters - */ - private String connParams; - /** - * Pre Statements - */ - private List preStatements; - /** - * Post Statements - */ - private List postStatements; - - /** - * title - */ - private String title; - - /** - * receivers - */ - private String receivers; - - /** - * receivers cc - */ - private String receiversCc; - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public int getDatasource() { - return datasource; - } - - public void setDatasource(int datasource) { - this.datasource = datasource; - } - - public String getSql() { - return sql; - } - - public void setSql(String sql) { - this.sql = sql; - } - - public String getUdfs() { - return udfs; - } - - public void setUdfs(String udfs) { - this.udfs = udfs; - } - - public int getSqlType() { - return sqlType; - } - - public void setSqlType(int sqlType) { - this.sqlType = sqlType; - } - - public String getShowType() { - return showType; - } - - public void setShowType(String showType) { - this.showType = showType; - } - - public String getConnParams() { - return connParams; - } - - public void setConnParams(String connParams) { - this.connParams = connParams; - } - - public String getTitle() { - return title; - 
} - - public void setTitle(String title) { - this.title = title; - } - - public String getReceivers() { - return receivers; - } - - public void setReceivers(String receivers) { - this.receivers = receivers; - } - - public String getReceiversCc() { - return receiversCc; - } - - public void setReceiversCc(String receiversCc) { - this.receiversCc = receiversCc; - } - public List getPreStatements() { - return preStatements; - } - - public void setPreStatements(List preStatements) { - this.preStatements = preStatements; - } - - public List getPostStatements() { - return postStatements; - } - - public void setPostStatements(List postStatements) { - this.postStatements = postStatements; - } - - @Override - public boolean checkParameters() { - return datasource != 0 && StringUtils.isNotEmpty(type) && StringUtils.isNotEmpty(sql); - } - - @Override - public List getResourceFilesList() { - return new ArrayList<>(); - } - - @Override - public String toString() { - return "SqlParameters{" + - "type='" + type + '\'' + - ", datasource=" + datasource + - ", sql='" + sql + '\'' + - ", sqlType=" + sqlType + - ", udfs='" + udfs + '\'' + - ", showType='" + showType + '\'' + - ", connParams='" + connParams + '\'' + - ", title='" + title + '\'' + - ", receivers='" + receivers + '\'' + - ", receiversCc='" + receiversCc + '\'' + - ", preStatements=" + preStatements + - ", postStatements=" + postStatements + - '}'; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlType.java b/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlType.java deleted file mode 100644 index 3a7a48bfbd..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlType.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
/**
 * Kind of statement a sql task executes.
 */
public enum SqlType {

    /** 0: statement that returns a result set */
    QUERY,

    /** 1: statement executed for its side effects only */
    NON_QUERY
}
- */ -package cn.escheduler.common.task.subprocess; -import cn.escheduler.common.task.AbstractParameters; - -import java.util.ArrayList; -import java.util.List; - - -public class SubProcessParameters extends AbstractParameters { - - /** - * process definition id - */ - private Integer processDefinitionId; - - public void setProcessDefinitionId(Integer processDefinitionId){ - this.processDefinitionId = processDefinitionId; - } - - public Integer getProcessDefinitionId(){ - return this.processDefinitionId; - } - - @Override - public boolean checkParameters() { - return this.processDefinitionId != 0; - } - - @Override - public List getResourceFilesList() { - return new ArrayList<>(); - } -} \ No newline at end of file diff --git a/escheduler-common/src/main/java/cn/escheduler/common/thread/Stopper.java b/escheduler-common/src/main/java/cn/escheduler/common/thread/Stopper.java deleted file mode 100644 index 76f31c4a22..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/thread/Stopper.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Process-wide shutdown flag: when the process is closing, the signal is
 * raised once and every worker thread polling this class sees it and can
 * wind down its work.
 */
public class Stopper {

    /** shared flag; false while the process is still running */
    private static volatile AtomicBoolean signal = new AtomicBoolean(false);

    /** @return true once {@link #stop()} has been called */
    public static final boolean isStoped() {
        return signal.get();
    }

    /** @return true while no stop has been requested */
    public static final boolean isRunning() {
        return !isStoped();
    }

    /** raise the shutdown signal; safe to call more than once */
    public static final void stop() {
        signal.compareAndSet(false, true);
    }
}
- */ -package cn.escheduler.common.thread; - - -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.PrintWriter; -import java.lang.management.ThreadInfo; -import java.util.List; -import java.util.Map; -import java.util.concurrent.*; -import java.util.concurrent.atomic.AtomicLong; - - -/** - * - * thread pool's single instance - * - */ -public class ThreadPoolExecutors { - - private static final Logger logger = LoggerFactory.getLogger(ThreadPoolExecutors.class); - private static Executor executor; - private static volatile ThreadPoolExecutors threadPoolExecutors; - - private ThreadPoolExecutors(){} - - - public static ThreadPoolExecutors getInstance(){ - return getInstance("thread_pool",0); - } - - public static ThreadPoolExecutors getInstance(String name, int maxThreads){ - - if (null == threadPoolExecutors) { - - synchronized (ThreadPoolExecutors.class) { - - if(null == threadPoolExecutors) { - threadPoolExecutors = new ThreadPoolExecutors(); - } - if(null == executor) { - executor = new Executor(null == name? "thread_pool" : name, maxThreads == 0? Runtime.getRuntime().availableProcessors() * 3 : maxThreads); - } - } - } - - return threadPoolExecutors; - } - - /** - * Executes the given task sometime in the future. The task may execute in a new thread or in an existing pooled thread. - * If the task cannot be submitted for execution, either because this executor has been shutdown or because its capacity has been reached, - * the task is handled by the current RejectedExecutionHandler. 
- * @param event - */ - public void execute(final Runnable event) { - Executor executor = getExecutor(); - if (executor == null) { - logger.error("Cannot execute [" + event + "] because the executor is missing."); - } else { - executor.execute(event); - } - } - - - public Future submit(Runnable event) { - Executor executor = getExecutor(); - if (executor == null) { - logger.error("Cannot submit [" + event + "] because the executor is missing."); - } else { - return executor.submit(event); - } - - return null; - - } - - - public Future submit(Callable task) { - Executor executor = getExecutor(); - if (executor == null) { - logger.error("Cannot submit [" + task + "] because the executor is missing."); - } else { - return executor.submit(task); - } - - return null; - } - - - - public void printStatus() { - Executor executor = getExecutor(); - executor.getStatus().dumpInfo(); - } - - - private Executor getExecutor() { - return executor; - } - - - public void shutdown() { - if (executor != null) { - List wasRunning = executor.threadPoolExecutor - .shutdownNow(); - if (!wasRunning.isEmpty()) { - logger.info(executor + " had " + wasRunning + " on shutdown"); - } - } - } - - - /** - * Executor instance. 
- */ - private static class Executor { - /** - * how long to retain excess threads - */ - final long keepAliveTimeInMillis = 1000; - /** - * the thread pool executor that services the requests - */ - final TrackingThreadPoolExecutor threadPoolExecutor; - /** - * work queue to use - unbounded queue - */ - final BlockingQueue q = new LinkedBlockingQueue(); - private final String name; - private static final AtomicLong seqids = new AtomicLong(0); - private final long id; - - protected Executor(String name, int maxThreads) { - this.id = seqids.incrementAndGet(); - this.name = name; - //create the thread pool executor - this.threadPoolExecutor = new TrackingThreadPoolExecutor( - maxThreads, maxThreads, keepAliveTimeInMillis, - TimeUnit.MILLISECONDS, q); - // name the threads for this threadpool - ThreadFactoryBuilder tfb = new ThreadFactoryBuilder(); - tfb.setNameFormat(this.name + "-%d"); - this.threadPoolExecutor.setThreadFactory(tfb.build()); - } - - /** - * Submit the event to the queue for handling. - * - * @param event - */ - void execute(final Runnable event) { - this.threadPoolExecutor.execute(event); - } - - Future submit(Runnable event) { - return this.threadPoolExecutor.submit(event); - } - - Future submit(Callable event) { - return this.threadPoolExecutor.submit(event); - } - - - @Override - public String toString() { - return getClass().getSimpleName() + "-" + id + "-" + name; - } - - public ExecutorStatus getStatus() { - List queuedEvents = Lists.newArrayList(); - for (Runnable r : q) { - queuedEvents.add(r); - } - - List running = Lists.newArrayList(); - for (Map.Entry e : threadPoolExecutor - .getRunningTasks().entrySet()) { - Runnable r = e.getValue(); - running.add(new RunningEventStatus(e.getKey(), r)); - } - - return new ExecutorStatus(this, queuedEvents, running); - } - } - - - /** - * A subclass of ThreadPoolExecutor that keeps track of the Runnables that - * are executing at any given point in time. 
- */ - static class TrackingThreadPoolExecutor extends ThreadPoolExecutor { - private ConcurrentMap running = Maps - .newConcurrentMap(); - - public TrackingThreadPoolExecutor(int corePoolSize, - int maximumPoolSize, long keepAliveTime, TimeUnit unit, - BlockingQueue workQueue) { - super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue); - } - - @Override - protected void afterExecute(Runnable r, Throwable t) { - super.afterExecute(r, t); - running.remove(Thread.currentThread()); - } - - @Override - protected void beforeExecute(Thread t, Runnable r) { - Runnable oldPut = running.put(t, r); - assert oldPut == null : "inconsistency for thread " + t; - super.beforeExecute(t, r); - } - - /** - * @return a map of the threads currently running tasks inside this - * executor. Each key is an active thread, and the value is the - * task that is currently running. Note that this is not a - * stable snapshot of the map. - */ - public ConcurrentMap getRunningTasks() { - return running; - } - } - - - /** - * A snapshot of the status of a particular executor. This includes the - * contents of the executor's pending queue, as well as the threads and - * events currently being processed. - * - * This is a consistent snapshot that is immutable once constructed. 
- */ - public static class ExecutorStatus { - final Executor executor; - final List queuedEvents; - final List running; - - ExecutorStatus(Executor executor, List queuedEvents, - List running) { - this.executor = executor; - this.queuedEvents = queuedEvents; - this.running = running; - } - - public void dumpInfo() { - - PrintWriter out = new PrintWriter(System.out); - - out.write("Status for executor: " + executor + "\n"); - out.write("=======================================\n"); - out.write(queuedEvents.size() + " events queued, " - + running.size() + " running\n"); - if (!queuedEvents.isEmpty()) { - out.write("Queued:\n"); - for (Runnable e : queuedEvents) { - out.write(" " + e + "\n"); - } - out.write("\n"); - } - if (!running.isEmpty()) { - out.write("Running:\n"); - for (RunningEventStatus stat : running) { - out.write(" Running on thread '" - + stat.threadInfo.getThreadName() + "': " - + stat.event + "\n"); - out.write(ThreadUtils.formatThreadInfo( - stat.threadInfo, " ")); - out.write("\n"); - } - } - out.flush(); - } - } - - - /** - * The status of a particular event that is in the middle of being handled - * by an executor. - */ - public static class RunningEventStatus { - final ThreadInfo threadInfo; - final Runnable event; - - public RunningEventStatus(Thread t, Runnable event) { - this.threadInfo = ThreadUtils.getThreadInfo(t); - this.event = event; - } - } -} \ No newline at end of file diff --git a/escheduler-common/src/main/java/cn/escheduler/common/thread/ThreadUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/thread/ThreadUtils.java deleted file mode 100644 index 7b2fc928a6..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/thread/ThreadUtils.java +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.thread; - -import com.google.common.util.concurrent.ThreadFactoryBuilder; - -import java.lang.management.ManagementFactory; -import java.lang.management.ThreadInfo; -import java.lang.management.ThreadMXBean; -import java.util.concurrent.*; - -/** - * thread utils - */ -public class ThreadUtils { - - - private static final ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); - private static final int STACK_DEPTH = 20; - - /** - Wrapper over newCachedThreadPool. Thread names are formatted as prefix-ID, where ID is a - * unique, sequentially assigned integer. - * @param prefix - * @return - */ - public static ThreadPoolExecutor newDaemonCachedThreadPool(String prefix){ - ThreadFactory threadFactory = namedThreadFactory(prefix); - return ((ThreadPoolExecutor) Executors.newCachedThreadPool(threadFactory)); - } - - /** - * Create a thread factory that names threads with a prefix and also sets the threads to daemon. - * @param prefix - * @return - */ - private static ThreadFactory namedThreadFactory(String prefix) { - return new ThreadFactoryBuilder().setDaemon(true).setNameFormat(prefix + "-%d").build(); - } - - - /** - * Create a cached thread pool whose max number of threads is `maxThreadNumber`. Thread names - * are formatted as prefix-ID, where ID is a unique, sequentially assigned integer. 
- * @param prefix - * @param maxThreadNumber - * @param keepAliveSeconds - * @return - */ - public static ThreadPoolExecutor newDaemonCachedThreadPool(String prefix , - int maxThreadNumber, - int keepAliveSeconds){ - ThreadFactory threadFactory = namedThreadFactory(prefix); - ThreadPoolExecutor threadPool = new ThreadPoolExecutor( - // corePoolSize: the max number of threads to create before queuing the tasks - maxThreadNumber, - // maximumPoolSize: because we use LinkedBlockingDeque, this one is not used - maxThreadNumber, - keepAliveSeconds, - TimeUnit.SECONDS, - new LinkedBlockingQueue(), - threadFactory); - threadPool.allowCoreThreadTimeOut(true); - return threadPool; - } - - - /** - * Wrapper over newFixedThreadPool. Thread names are formatted as prefix-ID, where ID is a - * unique, sequentially assigned integer. - * @param nThreads - * @param prefix - * @return - */ - public static ThreadPoolExecutor newDaemonFixedThreadPool(int nThreads , String prefix){ - ThreadFactory threadFactory = namedThreadFactory(prefix); - return ((ThreadPoolExecutor) Executors.newFixedThreadPool(nThreads, threadFactory)); - } - - /** - * Wrapper over newSingleThreadExecutor. - * @param threadName - * @return - */ - public static ExecutorService newDaemonSingleThreadExecutor(String threadName){ - ThreadFactory threadFactory = new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat(threadName) - .build(); - return Executors.newSingleThreadExecutor(threadFactory); - } - - /** - * Wrapper over newDaemonFixedThreadExecutor. 
- * @param threadName - * @param threadsNum - * @return - */ - public static ExecutorService newDaemonFixedThreadExecutor(String threadName,int threadsNum){ - ThreadFactory threadFactory = new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat(threadName) - .build(); - return Executors.newFixedThreadPool(threadsNum,threadFactory); - } - - /** - * Wrapper over ScheduledThreadPoolExecutor - * @param corePoolSize - * @return - */ - public static ScheduledExecutorService newDaemonThreadScheduledExecutor(String threadName,int corePoolSize) { - ThreadFactory threadFactory = new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat(threadName) - .build(); - ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(corePoolSize, threadFactory); - // By default, a cancelled task is not automatically removed from the work queue until its delay - // elapses. We have to enable it manually. - executor.setRemoveOnCancelPolicy(true); - return executor; - } - - - public static ThreadInfo getThreadInfo(Thread t) { - long tid = t.getId(); - return threadBean.getThreadInfo(tid, STACK_DEPTH); - } - - - /** - * Format the given ThreadInfo object as a String. - * @param indent a prefix for each line, used for nested indentation - */ - public static String formatThreadInfo(ThreadInfo threadInfo, String indent) { - StringBuilder sb = new StringBuilder(); - appendThreadInfo(sb, threadInfo, indent); - return sb.toString(); - } - - - /** - * Print all of the thread's information and stack traces. 
- * - * @param sb - * @param info - * @param indent - */ - public static void appendThreadInfo(StringBuilder sb, - ThreadInfo info, - String indent) { - boolean contention = threadBean.isThreadContentionMonitoringEnabled(); - - if (info == null) { - sb.append(indent).append("Inactive (perhaps exited while monitoring was done)\n"); - return; - } - String taskName = getTaskName(info.getThreadId(), info.getThreadName()); - sb.append(indent).append("Thread ").append(taskName).append(":\n"); - - Thread.State state = info.getThreadState(); - sb.append(indent).append(" State: ").append(state).append("\n"); - sb.append(indent).append(" Blocked count: ").append(info.getBlockedCount()).append("\n"); - sb.append(indent).append(" Waited count: ").append(info.getWaitedCount()).append("\n"); - if (contention) { - sb.append(indent).append(" Blocked time: " + info.getBlockedTime()).append("\n"); - sb.append(indent).append(" Waited time: " + info.getWaitedTime()).append("\n"); - } - if (state == Thread.State.WAITING) { - sb.append(indent).append(" Waiting on ").append(info.getLockName()).append("\n"); - } else if (state == Thread.State.BLOCKED) { - sb.append(indent).append(" Blocked on ").append(info.getLockName()).append("\n"); - sb.append(indent).append(" Blocked by ").append( - getTaskName(info.getLockOwnerId(), info.getLockOwnerName())).append("\n"); - } - sb.append(indent).append(" Stack:").append("\n"); - for (StackTraceElement frame: info.getStackTrace()) { - sb.append(indent).append(" ").append(frame.toString()).append("\n"); - } - } - - private static String getTaskName(long id, String name) { - if (name == null) { - return Long.toString(id); - } - return id + " (" + name + ")"; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/Bytes.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/Bytes.java deleted file mode 100644 index 9cb4e094a6..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/Bytes.java +++ 
/dev/null @@ -1,697 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.nio.ByteBuffer; -import java.nio.charset.StandardCharsets; - -/** - * Utility class that handles Bytes - */ -public class Bytes { - - private static final Logger logger = LoggerFactory.getLogger(Bytes.class); - public static final String UTF8_ENCODING = "UTF-8"; - //An empty instance. - public static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; - - /** - * Size of int in bytes - */ - public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE; - - /** - * Size of long in bytes - */ - public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE; - - /** - * Size of short in bytes - */ - public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE; - - - - /** - * Put bytes at the specified byte array position. 
- * @param tgtBytes the byte array - * @param tgtOffset position in the array - * @param srcBytes array to write out - * @param srcOffset source offset - * @param srcLength source length - * @return incremented offset - */ - public static int putBytes(byte[] tgtBytes, int tgtOffset, byte[] srcBytes, - int srcOffset, int srcLength) { - System.arraycopy(srcBytes, srcOffset, tgtBytes, tgtOffset, srcLength); - return tgtOffset + srcLength; - } - - /** - * Write a single byte out to the specified byte array position. - * @param bytes the byte array - * @param offset position in the array - * @param b byte to write out - * @return incremented offset - */ - public static int putByte(byte[] bytes, int offset, byte b) { - bytes[offset] = b; - return offset + 1; - } - - /** - * Returns a new byte array, copied from the passed ByteBuffer. - * @param bb A ByteBuffer - * @return the byte array - */ - public static byte[] toBytes(ByteBuffer bb) { - int length = bb.limit(); - byte [] result = new byte[length]; - System.arraycopy(bb.array(), bb.arrayOffset(), result, 0, length); - return result; - } - - /** - * @param b Presumed UTF-8 encoded byte array. - * @return String made from b - */ - public static String toString(final byte [] b) { - if (b == null) { - return null; - } - return toString(b, 0, b.length); - } - - /** - * Joins two byte arrays together using a separator. - * @param b1 The first byte array. - * @param sep The separator to use. - * @param b2 The second byte array. - */ - public static String toString(final byte [] b1, - String sep, - final byte [] b2) { - return toString(b1, 0, b1.length) + sep + toString(b2, 0, b2.length); - } - - /** - * This method will convert utf8 encoded bytes into a string. If - * an UnsupportedEncodingException occurs, this method will eat it - * and return null instead. - * - * @param b Presumed UTF-8 encoded byte array. 
- * @param off offset into array - * @param len length of utf-8 sequence - * @return String made from b or null - */ - public static String toString(final byte [] b, int off, int len) { - if (b == null) { - return null; - } - if (len == 0) { - return ""; - } - return new String(b, off, len, StandardCharsets.UTF_8); - } - - - /** - * Converts a string to a UTF-8 byte array. - * @param s string - * @return the byte array - */ - public static byte[] toBytes(String s) { - return s.getBytes(StandardCharsets.UTF_8); - } - - /** - * Convert a boolean to a byte array. True becomes -1 - * and false becomes 0. - * - * @param b value - * @return b encoded in a byte array. - */ - public static byte [] toBytes(final boolean b) { - return new byte[] { b ? (byte) -1 : (byte) 0 }; - } - - /** - * Reverses {@link #toBytes(boolean)} - * @param b array - * @return True or false. - */ - public static boolean toBoolean(final byte [] b) { - if (b.length != 1) { - throw new IllegalArgumentException("Array has wrong size: " + b.length); - } - return b[0] != (byte) 0; - } - - /** - * Convert a long value to a byte array using big-endian. - * - * @param val value to convert - * @return the byte array - */ - public static byte[] toBytes(long val) { - byte [] b = new byte[8]; - for (int i = 7; i > 0; i--) { - b[i] = (byte) val; - val >>>= 8; - } - b[0] = (byte) val; - return b; - } - - /** - * Converts a byte array to a long value. Reverses - * {@link #toBytes(long)} - * @param bytes array - * @return the long value - */ - public static long toLong(byte[] bytes) { - return toLong(bytes, 0, SIZEOF_LONG); - } - - /** - * Converts a byte array to a long value. Assumes there will be - * {@link #SIZEOF_LONG} bytes available. - * - * @param bytes bytes - * @param offset offset - * @return the long value - */ - public static long toLong(byte[] bytes, int offset) { - return toLong(bytes, offset, SIZEOF_LONG); - } - - /** - * Converts a byte array to a long value. 
- * - * @param bytes array of bytes - * @param offset offset into array - * @param length length of data (must be {@link #SIZEOF_LONG}) - * @return the long value - * @throws IllegalArgumentException if length is not {@link #SIZEOF_LONG} or - * if there's not enough room in the array at the offset indicated. - */ - public static long toLong(byte[] bytes, int offset, final int length) { - if (length != SIZEOF_LONG || offset + length > bytes.length) { - throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_LONG); - } - long l = 0; - for(int i = offset; i < offset + length; i++) { - l <<= 8; - l ^= bytes[i] & 0xFF; - } - return l; - } - - private static IllegalArgumentException - explainWrongLengthOrOffset(final byte[] bytes, - final int offset, - final int length, - final int expectedLength) { - String reason; - if (length != expectedLength) { - reason = "Wrong length: " + length + ", expected " + expectedLength; - } else { - reason = "offset (" + offset + ") + length (" + length + ") exceed the" - + " capacity of the array: " + bytes.length; - } - return new IllegalArgumentException(reason); - } - - /** - * Put a long value out to the specified byte array position. - * @param bytes the byte array - * @param offset position in the array - * @param val long to write out - * @return incremented offset - * @throws IllegalArgumentException if the byte array given doesn't have - * enough room at the offset specified. 
- */ - public static int putLong(byte[] bytes, int offset, long val) { - if (bytes.length - offset < SIZEOF_LONG) { - throw new IllegalArgumentException("Not enough room to put a long at" - + " offset " + offset + " in a " + bytes.length + " byte array"); - } - for(int i = offset + 7; i > offset; i--) { - bytes[i] = (byte) val; - val >>>= 8; - } - bytes[offset] = (byte) val; - return offset + SIZEOF_LONG; - } - - /** - * Presumes float encoded as IEEE 754 floating-point "single format" - * @param bytes byte array - * @return Float made from passed byte array. - */ - public static float toFloat(byte [] bytes) { - return toFloat(bytes, 0); - } - - /** - * Presumes float encoded as IEEE 754 floating-point "single format" - * @param bytes array to convert - * @param offset offset into array - * @return Float made from passed byte array. - */ - public static float toFloat(byte [] bytes, int offset) { - return Float.intBitsToFloat(toInt(bytes, offset, SIZEOF_INT)); - } - - /** - * @param bytes byte array - * @param offset offset to write to - * @param f float value - * @return New offset in bytes - */ - public static int putFloat(byte [] bytes, int offset, float f) { - return putInt(bytes, offset, Float.floatToRawIntBits(f)); - } - - /** - * @param f float value - * @return the float represented as byte [] - */ - public static byte [] toBytes(final float f) { - // Encode it as int - return Bytes.toBytes(Float.floatToRawIntBits(f)); - } - - /** - * @param bytes byte array - * @return Return double made from passed bytes. - */ - public static double toDouble(final byte [] bytes) { - return toDouble(bytes, 0); - } - - /** - * @param bytes byte array - * @param offset offset where double is - * @return Return double made from passed bytes. 
- */ - public static double toDouble(final byte [] bytes, final int offset) { - return Double.longBitsToDouble(toLong(bytes, offset, SIZEOF_LONG)); - } - - /** - * @param bytes byte array - * @param offset offset to write to - * @param d value - * @return New offset into array bytes - */ - public static int putDouble(byte [] bytes, int offset, double d) { - return putLong(bytes, offset, Double.doubleToLongBits(d)); - } - - /** - * Serialize a double as the IEEE 754 double format output. The resultant - * array will be 8 bytes long. - * - * @param d value - * @return the double represented as byte [] - */ - public static byte [] toBytes(final double d) { - // Encode it as a long - return Bytes.toBytes(Double.doubleToRawLongBits(d)); - } - - /** - * Convert an int value to a byte array - * @param val value - * @return the byte array - */ - public static byte[] toBytes(int val) { - byte [] b = new byte[4]; - for(int i = 3; i > 0; i--) { - b[i] = (byte) val; - val >>>= 8; - } - b[0] = (byte) val; - return b; - } - - /** - * Converts a byte array to an int value - * @param bytes byte array - * @return the int value - */ - public static int toInt(byte[] bytes) { - return toInt(bytes, 0, SIZEOF_INT); - } - - /** - * Converts a byte array to an int value - * @param bytes byte array - * @param offset offset into array - * @return the int value - */ - public static int toInt(byte[] bytes, int offset) { - return toInt(bytes, offset, SIZEOF_INT); - } - - /** - * Converts a byte array to an int value - * @param bytes byte array - * @param offset offset into array - * @param length length of int (has to be {@link #SIZEOF_INT}) - * @return the int value - * @throws IllegalArgumentException if length is not {@link #SIZEOF_INT} or - * if there's not enough room in the array at the offset indicated. 
- */ - public static int toInt(byte[] bytes, int offset, final int length) { - if (length != SIZEOF_INT || offset + length > bytes.length) { - throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_INT); - } - int n = 0; - for(int i = offset; i < (offset + length); i++) { - n <<= 8; - n ^= bytes[i] & 0xFF; - } - return n; - } - - /** - * Put an int value out to the specified byte array position. - * @param bytes the byte array - * @param offset position in the array - * @param val int to write out - * @return incremented offset - * @throws IllegalArgumentException if the byte array given doesn't have - * enough room at the offset specified. - */ - public static int putInt(byte[] bytes, int offset, int val) { - if (bytes.length - offset < SIZEOF_INT) { - throw new IllegalArgumentException("Not enough room to put an int at" - + " offset " + offset + " in a " + bytes.length + " byte array"); - } - for(int i= offset + 3; i > offset; i--) { - bytes[i] = (byte) val; - val >>>= 8; - } - bytes[offset] = (byte) val; - return offset + SIZEOF_INT; - } - - /** - * Convert a short value to a byte array of {@link #SIZEOF_SHORT} bytes long. 
- * @param val value - * @return the byte array - */ - public static byte[] toBytes(short val) { - byte[] b = new byte[SIZEOF_SHORT]; - b[1] = (byte) val; - val >>= 8; - b[0] = (byte) val; - return b; - } - - /** - * Converts a byte array to a short value - * @param bytes byte array - * @return the short value - */ - public static short toShort(byte[] bytes) { - return toShort(bytes, 0, SIZEOF_SHORT); - } - - /** - * Converts a byte array to a short value - * @param bytes byte array - * @param offset offset into array - * @return the short value - */ - public static short toShort(byte[] bytes, int offset) { - return toShort(bytes, offset, SIZEOF_SHORT); - } - - /** - * Converts a byte array to a short value - * @param bytes byte array - * @param offset offset into array - * @param length length, has to be {@link #SIZEOF_SHORT} - * @return the short value - * @throws IllegalArgumentException if length is not {@link #SIZEOF_SHORT} - * or if there's not enough room in the array at the offset indicated. - */ - public static short toShort(byte[] bytes, int offset, final int length) { - if (length != SIZEOF_SHORT || offset + length > bytes.length) { - throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_SHORT); - } - short n = 0; - n ^= bytes[offset] & 0xFF; - n <<= 8; - n ^= bytes[offset+1] & 0xFF; - return n; - } - - /** - * This method will get a sequence of bytes from pos -> limit, - * but will restore pos after. - * @param buf - * @return byte array - */ - public static byte[] getBytes(ByteBuffer buf) { - int savedPos = buf.position(); - byte [] newBytes = new byte[buf.remaining()]; - buf.get(newBytes); - buf.position(savedPos); - return newBytes; - } - - /** - * Put a short value out to the specified byte array position. 
- * @param bytes the byte array - * @param offset position in the array - * @param val short to write out - * @return incremented offset - * @throws IllegalArgumentException if the byte array given doesn't have - * enough room at the offset specified. - */ - public static int putShort(byte[] bytes, int offset, short val) { - if (bytes.length - offset < SIZEOF_SHORT) { - throw new IllegalArgumentException("Not enough room to put a short at" - + " offset " + offset + " in a " + bytes.length + " byte array"); - } - bytes[offset+1] = (byte) val; - val >>= 8; - bytes[offset] = (byte) val; - return offset + SIZEOF_SHORT; - } - - /** - * Convert a BigDecimal value to a byte array - * - * @param val - * @return the byte array - */ - public static byte[] toBytes(BigDecimal val) { - byte[] valueBytes = val.unscaledValue().toByteArray(); - byte[] result = new byte[valueBytes.length + SIZEOF_INT]; - int offset = putInt(result, 0, val.scale()); - putBytes(result, offset, valueBytes, 0, valueBytes.length); - return result; - } - - - /** - * Converts a byte array to a BigDecimal - * - * @param bytes - * @return the char value - */ - public static BigDecimal toBigDecimal(byte[] bytes) { - return toBigDecimal(bytes, 0, bytes.length); - } - - /** - * Converts a byte array to a BigDecimal value - * - * @param bytes - * @param offset - * @param length - * @return the char value - */ - public static BigDecimal toBigDecimal(byte[] bytes, int offset, final int length) { - if (bytes == null || length < SIZEOF_INT + 1 || - (offset + length > bytes.length)) { - return null; - } - - int scale = toInt(bytes, offset); - byte[] tcBytes = new byte[length - SIZEOF_INT]; - System.arraycopy(bytes, offset + SIZEOF_INT, tcBytes, 0, length - SIZEOF_INT); - return new BigDecimal(new BigInteger(tcBytes), scale); - } - - /** - * Put a BigDecimal value out to the specified byte array position. 
- * - * @param bytes the byte array - * @param offset position in the array - * @param val BigDecimal to write out - * @return incremented offset - */ - public static int putBigDecimal(byte[] bytes, int offset, BigDecimal val) { - if (bytes == null) { - return offset; - } - - byte[] valueBytes = val.unscaledValue().toByteArray(); - byte[] result = new byte[valueBytes.length + SIZEOF_INT]; - offset = putInt(result, offset, val.scale()); - return putBytes(result, offset, valueBytes, 0, valueBytes.length); - } - - /** - * @param a lower half - * @param b upper half - * @return New array that has a in lower half and b in upper half. - */ - public static byte [] add(final byte [] a, final byte [] b) { - return add(a, b, EMPTY_BYTE_ARRAY); - } - - /** - * @param a first third - * @param b second third - * @param c third third - * @return New array made from a, b and c - */ - public static byte [] add(final byte [] a, final byte [] b, final byte [] c) { - byte [] result = new byte[a.length + b.length + c.length]; - System.arraycopy(a, 0, result, 0, a.length); - System.arraycopy(b, 0, result, a.length, b.length); - System.arraycopy(c, 0, result, a.length + b.length, c.length); - return result; - } - - /** - * @param a array - * @param length amount of bytes to grab - * @return First length bytes from a - */ - public static byte [] head(final byte [] a, final int length) { - if (a.length < length) { - return null; - } - byte [] result = new byte[length]; - System.arraycopy(a, 0, result, 0, length); - return result; - } - - /** - * @param a array - * @param length amount of bytes to snarf - * @return Last length bytes from a - */ - public static byte [] tail(final byte [] a, final int length) { - if (a.length < length) { - return null; - } - byte [] result = new byte[length]; - System.arraycopy(a, a.length - length, result, 0, length); - return result; - } - - /** - * @param a array - * @param length new array size - * @return Value in a plus length prepended 0 bytes - */ - 
public static byte [] padHead(final byte [] a, final int length) { - byte[] padding = getPadding(length); - return add(padding,a); - } - - private static byte[] getPadding(int length) { - byte[] padding = new byte[length]; - for (int i = 0; i < length; i++) { - padding[i] = 0; - } - return padding; - } - - /** - * @param a array - * @param length new array size - * @return Value in a plus length appended 0 bytes - */ - public static byte [] padTail(final byte [] a, final int length) { - byte[] padding = getPadding(length); - return add(a,padding); - } - - - - /** - * @param bytes array to hash - * @param offset offset to start from - * @param length length to hash - * */ - public static int hashCode(byte[] bytes, int offset, int length) { - int hash = 1; - for (int i = offset; i < offset + length; i++) { - hash = (31 * hash) + (int) bytes[i]; - } - return hash; - } - - /** - * @param t operands - * @return Array of byte arrays made from passed array of Text - */ - public static byte [][] toByteArrays(final String [] t) { - byte [][] result = new byte[t.length][]; - for (int i = 0; i < t.length; i++) { - result[i] = Bytes.toBytes(t[i]); - } - return result; - } - - /** - * @param column operand - * @return A byte array of a byte array where first and only entry is - * column - */ - public static byte [][] toByteArrays(final String column) { - return toByteArrays(toBytes(column)); - } - - /** - * @param column operand - * @return A byte array of a byte array where first and only entry is - * column - */ - public static byte [][] toByteArrays(final byte [] column) { - byte [][] result = new byte[1][]; - result[0] = column; - return result; - } - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/CollectionUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/CollectionUtils.java deleted file mode 100644 index 907b6b51a9..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/CollectionUtils.java +++ 
/dev/null @@ -1,292 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - - -import org.apache.commons.collections.BeanMap; -import org.apache.commons.lang.StringUtils; - -import java.util.*; - - -/** - * Provides utility methods and decorators for {@link Collection} instances. - *

- * Various utility methods might put the input objects into a Set/Map/Bag. In case - * the input objects override {@link Object#equals(Object)}, it is mandatory that - * the general contract of the {@link Object#hashCode()} method is maintained. - *

- * NOTE: From 4.0, method parameters will take {@link Iterable} objects when possible. - * - * @version $Id: CollectionUtils.java 1686855 2015-06-22 13:00:27Z tn $ - * @since 1.0 - */ -public class CollectionUtils { - - /** - * Returns a new {@link Collection} containing a minus a subset of - * b. Only the elements of b that satisfy the predicate - * condition, p are subtracted from a. - *

- *

The cardinality of each element e in the returned {@link Collection} - * that satisfies the predicate condition will be the cardinality of e in a - * minus the cardinality of e in b, or zero, whichever is greater.

- *

The cardinality of each element e in the returned {@link Collection} that does not - * satisfy the predicate condition will be equal to the cardinality of e in a.

- * - * @param a the collection to subtract from, must not be null - * @param b the collection to subtract, must not be null - * @return a new collection with the results - * @see Collection#removeAll - */ - public static Collection subtract(Set a, Set b) { - return org.apache.commons.collections4.CollectionUtils.subtract(a, b); - } - - public static boolean isNotEmpty(Collection coll) { - return !isEmpty(coll); - } - - public static boolean isEmpty(Collection coll) { - return coll == null || coll.isEmpty(); - } - - /** - * String to map - * - * @param str string - * @param separator separator - * @return - */ - public static Map stringToMap(String str, String separator) { - return stringToMap(str, separator, ""); - } - - /** - * String to map - * - * @param str string - * @param separator separator - * @param keyPrefix prefix - * @return - */ - public static Map stringToMap(String str, String separator, String keyPrefix) { - if (null == str || "".equals(str)) { - return null; - } - if (null == separator || "".equals(separator)) { - return null; - } - String[] strings = str.split(separator); - int mapLength = strings.length; - if ((strings.length % 2) != 0) { - mapLength = mapLength + 1; - } - - Map map = new HashMap<>(mapLength); - for (int i = 0; i < strings.length; i++) { - String[] strArray = strings[i].split("="); - //strArray[0] KEY strArray[1] VALUE - if (StringUtils.isEmpty(keyPrefix)) { - map.put(strArray[0], strArray[1]); - } else { - map.put(keyPrefix + strArray[0], strArray[1]); - } - } - return map; - } - - - /** - * Helper class to easily access cardinality properties of two collections. - * - * @param the element type - */ - private static class CardinalityHelper { - - /** - * Contains the cardinality for each object in collection A. - */ - final Map cardinalityA; - - /** - * Contains the cardinality for each object in collection B. - */ - final Map cardinalityB; - - /** - * Create a new CardinalityHelper for two collections. 
- * - * @param a the first collection - * @param b the second collection - */ - public CardinalityHelper(final Iterable a, final Iterable b) { - cardinalityA = CollectionUtils.getCardinalityMap(a); - cardinalityB = CollectionUtils.getCardinalityMap(b); - } - - /** - * Returns the maximum frequency of an object. - * - * @param obj the object - * @return the maximum frequency of the object - */ - public final int max(final Object obj) { - return Math.max(freqA(obj), freqB(obj)); - } - - /** - * Returns the minimum frequency of an object. - * - * @param obj the object - * @return the minimum frequency of the object - */ - public final int min(final Object obj) { - return Math.min(freqA(obj), freqB(obj)); - } - - /** - * Returns the frequency of this object in collection A. - * - * @param obj the object - * @return the frequency of the object in collection A - */ - public int freqA(final Object obj) { - return getFreq(obj, cardinalityA); - } - - /** - * Returns the frequency of this object in collection B. - * - * @param obj the object - * @return the frequency of the object in collection B - */ - public int freqB(final Object obj) { - return getFreq(obj, cardinalityB); - } - - private final int getFreq(final Object obj, final Map freqMap) { - final Integer count = freqMap.get(obj); - if (count != null) { - return count.intValue(); - } - return 0; - } - } - - /** - * returns {@code true} iff the given {@link Collection}s contain - * exactly the same elements with exactly the same cardinalities. - * - * @param a the first collection - * @param b the second collection - * @return Returns true iff the given Collections contain exactly the same elements with exactly the same cardinalities. - * That is, iff the cardinality of e in a is equal to the cardinality of e in b, for each element e in a or b. 
- */ - public static boolean equalLists(Collection a, Collection b) { - if (a == null && b == null) { - return true; - } - - if ((a == null && b != null) || a != null && b == null) { - return false; - } - - return isEqualCollection(a, b); - } - - /** - * Returns {@code true} iff the given {@link Collection}s contain - * exactly the same elements with exactly the same cardinalities. - *

- * That is, iff the cardinality of e in a is - * equal to the cardinality of e in b, - * for each element e in a or b. - * - * @param a the first collection, must not be null - * @param b the second collection, must not be null - * @return true iff the collections contain the same elements with the same cardinalities. - */ - public static boolean isEqualCollection(final Collection a, final Collection b) { - if (a.size() != b.size()) { - return false; - } - final CardinalityHelper helper = new CardinalityHelper(a, b); - if (helper.cardinalityA.size() != helper.cardinalityB.size()) { - return false; - } - for (final Object obj : helper.cardinalityA.keySet()) { - if (helper.freqA(obj) != helper.freqB(obj)) { - return false; - } - } - return true; - } - - /** - * Returns a {@link Map} mapping each unique element in the given - * {@link Collection} to an {@link Integer} representing the number - * of occurrences of that element in the {@link Collection}. - *

- * Only those elements present in the collection will appear as - * keys in the map. - * - * @param the type of object in the returned {@link Map}. This is a super type of . - * @param coll the collection to get the cardinality map for, must not be null - * @return the populated cardinality map - */ - public static Map getCardinalityMap(final Iterable coll) { - final Map count = new HashMap(); - for (final O obj : coll) { - final Integer c = count.get(obj); - if (c == null) { - count.put(obj, Integer.valueOf(1)); - } else { - count.put(obj, Integer.valueOf(c.intValue() + 1)); - } - } - return count; - } - - - /** - * Removes certain attributes of each object in the list - * @param originList - * @param exclusionSet - * @param - * @return - */ - public static List> getListByExclusion(List originList, Set exclusionSet) { - List> instanceList = new ArrayList<>(); - Map instanceMap; - for (T instance : originList) { - Map dataMap = new BeanMap(instance); - instanceMap = new LinkedHashMap<>(16,0.75f,true); - for (Object key : dataMap.keySet()) { - - if (exclusionSet.contains(key.toString())) { - continue; - } - instanceMap.put(key.toString(), dataMap.get(key)); - - } - instanceList.add(instanceMap); - } - return instanceList; - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/CommonUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/CommonUtils.java deleted file mode 100644 index b1d084f446..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/CommonUtils.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ResUploadType; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; - -import static cn.escheduler.common.Constants.*; -import static cn.escheduler.common.utils.PropertyUtils.getBoolean; -import static cn.escheduler.common.utils.PropertyUtils.getString; - -/** - * common utils - */ -public class CommonUtils { - - private static final Logger logger = LoggerFactory.getLogger(CommonUtils.class); - - /** - * @return get the path of system environment variables - */ - public static String getSystemEnvPath() { - String envPath = getString(ESCHEDULER_ENV_PATH); - if (StringUtils.isEmpty(envPath)) { - envPath = System.getProperty("user.home") + File.separator + ".bash_profile"; - } - - return envPath; - } - - /** - * @return get queue implementation name - */ - public static String getQueueImplValue(){ - return getString(Constants.SCHEDULER_QUEUE_IMPL); - } - - /** - * - * @return is develop mode - */ - public static boolean isDevelopMode() { - return getBoolean(DEVELOPMENT_STATE); - } - - - - /** - * if upload resource is HDFS and kerberos startup is true , else false - * @return - */ - public static boolean getKerberosStartupState(){ - String resUploadStartupType = PropertyUtils.getString(cn.escheduler.common.Constants.RES_UPLOAD_STARTUP_TYPE); - ResUploadType 
resUploadType = ResUploadType.valueOf(resUploadStartupType); - Boolean kerberosStartupState = getBoolean(cn.escheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE); - return resUploadType == ResUploadType.HDFS && kerberosStartupState; - } - - /** - * load kerberos configuration - * @throws Exception - */ - public static void loadKerberosConf()throws Exception{ - if (CommonUtils.getKerberosStartupState()) { - System.setProperty(JAVA_SECURITY_KRB5_CONF, getString(JAVA_SECURITY_KRB5_CONF_PATH)); - Configuration configuration = new Configuration(); - configuration.set(HADOOP_SECURITY_AUTHENTICATION, KERBEROS); - UserGroupInformation.setConfiguration(configuration); - UserGroupInformation.loginUserFromKeytab(getString(LOGIN_USER_KEY_TAB_USERNAME), - getString(cn.escheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH)); - } - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/ConnectionUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/ConnectionUtils.java deleted file mode 100644 index 33e5d41b97..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/ConnectionUtils.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.*; - -public class ConnectionUtils { - - public static final Logger logger = LoggerFactory.getLogger(ConnectionUtils.class); - - private static ConnectionUtils instance; - - ConnectionUtils() { - } - - public static ConnectionUtils getInstance() { - if (null == instance) { - syncInit(); - } - return instance; - } - - private static synchronized void syncInit() { - if (instance == null) { - instance = new ConnectionUtils(); - } - } - - public void release(ResultSet rs, Statement stmt, Connection conn) { - try { - if (rs != null) { - rs.close(); - rs = null; - } - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e); - } finally { - try { - if (stmt != null) { - stmt.close(); - stmt = null; - } - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e); - } finally { - try { - if (conn != null) { - conn.close(); - conn = null; - } - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e); - } - } - } - } - - public static void releaseResource(ResultSet rs, PreparedStatement ps, Connection conn) { - ConnectionUtils.getInstance().release(rs,ps,conn); - if (null != rs) { - try { - rs.close(); - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } - } - - if (null != ps) { - try { - ps.close(); - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } - } - - if (null != conn) { - try { - conn.close(); - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } - } - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/DateUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/DateUtils.java deleted file mode 100644 index 045db346c6..0000000000 --- 
a/escheduler-common/src/main/java/cn/escheduler/common/utils/DateUtils.java +++ /dev/null @@ -1,356 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.Constants; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.time.Instant; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.time.format.DateTimeFormatter; -import java.util.Calendar; -import java.util.Date; - -/** - * date utils - */ -public class DateUtils { - - private static final Logger logger = LoggerFactory.getLogger(DateUtils.class); - - /** - * java.util.Date to java.time.LocalDateTime - * use default zone - * @param date - * @return - */ - private static LocalDateTime date2LocalDateTime(Date date) { - return LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault()); - } - - /** - * java.time.LocalDateTime to java.util.Date - * use default zone - * @param localDateTime - * @return - */ - private static Date localDateTime2Date(LocalDateTime localDateTime) { - Instant instant = localDateTime.atZone(ZoneId.systemDefault()).toInstant(); - return Date.from(instant); - } - - /** - * @return get the formatted date string for the current time 
- */ - public static String getCurrentTime() { - return getCurrentTime(Constants.YYYY_MM_DD_HH_MM_SS); - } - - /** - * @param format - * @return get the date string in the specified format of the current time - */ - public static String getCurrentTime(String format) { -// return new SimpleDateFormat(format).format(new Date()); - return LocalDateTime.now().format(DateTimeFormatter.ofPattern(format)); - } - - /** - * @param date - * @param format e.g. yyyy-MM-dd HH:mm:ss - * @return get the formatted date string - */ - public static String format(Date date, String format) { -// return new SimpleDateFormat(format).format(date); - return format(date2LocalDateTime(date), format); - } - - /** - * @param localDateTime - * @param format e.g. yyyy-MM-dd HH:mm:ss - * @return get the formatted date string - */ - public static String format(LocalDateTime localDateTime, String format) { - return localDateTime.format(DateTimeFormatter.ofPattern(format)); - } - - /** - * @param date - * @return convert time to yyyy-MM-dd HH:mm:ss format - */ - public static String dateToString(Date date) { - return format(date, Constants.YYYY_MM_DD_HH_MM_SS); - } - - - /** - * @param date - * @return convert string to date and time - */ - public static Date parse(String date, String format) { - try { - // return new SimpleDateFormat(format).parse(date); - LocalDateTime ldt = LocalDateTime.parse(date, DateTimeFormatter.ofPattern(format)); - return localDateTime2Date(ldt); - } catch (Exception e) { - logger.error("error while parse date:" + date, e); - } - return null; - } - - /** - * convert date str to yyyy-MM-dd HH:mm:ss format - * - * @param str - * @return - */ - public static Date stringToDate(String str) { - return parse(str, Constants.YYYY_MM_DD_HH_MM_SS); - } - - /** - * get seconds between two dates - * - * @param d1 - * @param d2 - * @return - */ - public static long differSec(Date d1, Date d2) { - return (long) Math.ceil(differMs(d1, d2) / 1000.0); - } - - /** - * get ms between two 
dates - * - * @param d1 - * @param d2 - * @return - */ - public static long differMs(Date d1, Date d2) { - return Math.abs(d1.getTime() - d2.getTime()); - } - - - /** - * get hours between two dates - * - * @param d1 - * @param d2 - * @return - */ - public static long diffHours(Date d1, Date d2) { - return (long) Math.ceil(diffMin(d1, d2) / 60.0); - } - - /** - * get minutes between two dates - * - * @param d1 - * @param d2 - * @return - */ - public static long diffMin(Date d1, Date d2) { - return (long) Math.ceil(differSec(d1, d2) / 60.0); - } - - - /** - * get the date of the specified date in the days before and after - * - * @param date - * @param day - * @return - */ - public static Date getSomeDay(Date date, int day) { - Calendar calendar = Calendar.getInstance(); - calendar.setTime(date); - calendar.add(Calendar.DATE, day); - return calendar.getTime(); - } - - /** - * compare two dates - * - * @param future - * @param old - * @return - */ - public static boolean compare(Date future, Date old) { - return future.getTime() > old.getTime(); - } - - /** - * convert schedule string to date - * - * @param schedule - * @return - */ - public static Date getScheduleDate(String schedule) { - return stringToDate(schedule); - } - - /** - * format time to readable - * - * @param ms - * @return - */ - public static String format2Readable(long ms) { - - long days = ms / (1000 * 60 * 60 * 24); - long hours = (ms % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60); - long minutes = (ms % (1000 * 60 * 60)) / (1000 * 60); - long seconds = (ms % (1000 * 60)) / 1000; - - return String.format("%02d %02d:%02d:%02d", days, hours, minutes, seconds); - - } - - /** - * get monday - *

- * note: Set the first day of the week to Monday, the default is Sunday - */ - public static Date getMonday(Date date) { - Calendar cal = Calendar.getInstance(); - - cal.setTime(date); - - cal.setFirstDayOfWeek(Calendar.MONDAY); - cal.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); - - return cal.getTime(); - } - - /** - * get sunday - *

- * note: Set the first day of the week to Monday, the default is Sunday - */ - public static Date getSunday(Date date) { - Calendar cal = Calendar.getInstance(); - cal.setTime(date); - - cal.setFirstDayOfWeek(Calendar.MONDAY); - cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY); - - return cal.getTime(); - } - - /** - * get first day of month - */ - public static Date getFirstDayOfMonth(Date date) { - Calendar cal = Calendar.getInstance(); - - cal.setTime(date); - cal.set(Calendar.DAY_OF_MONTH, 1); - - return cal.getTime(); - } - - /** - * get first day of month - */ - public static Date getSomeHourOfDay(Date date, int hours) { - Calendar cal = Calendar.getInstance(); - - cal.setTime(date); - cal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY) - hours); - cal.set(Calendar.MINUTE, 0); - cal.set(Calendar.SECOND, 0); - - return cal.getTime(); - } - - /** - * get last day of month - */ - public static Date getLastDayOfMonth(Date date) { - Calendar cal = Calendar.getInstance(); - - cal.setTime(date); - - cal.add(Calendar.MONTH, 1); - cal.set(Calendar.DAY_OF_MONTH, 1); - cal.add(Calendar.DAY_OF_MONTH, -1); - - return cal.getTime(); - } - - /** - * return YYYY-MM-DD 00:00:00 - * - * @param inputDay - * @return - */ - public static Date getStartOfDay(Date inputDay) { - Calendar cal = Calendar.getInstance(); - cal.setTime(inputDay); - cal.set(Calendar.HOUR_OF_DAY, 0); - cal.set(Calendar.MINUTE, 0); - cal.set(Calendar.SECOND, 0); - return cal.getTime(); - } - - /** - * return YYYY-MM-DD 23:59:59 - * - * @param inputDay - * @return - */ - public static Date getEndOfDay(Date inputDay) { - Calendar cal = Calendar.getInstance(); - cal.setTime(inputDay); - cal.set(Calendar.HOUR_OF_DAY, 23); - cal.set(Calendar.MINUTE, 59); - cal.set(Calendar.SECOND, 59); - return cal.getTime(); - } - - /** - * return YYYY-MM-DD 00:00:00 - * - * @param inputDay - * @return - */ - public static Date getStartOfHour(Date inputDay) { - Calendar cal = Calendar.getInstance(); - 
cal.setTime(inputDay); - cal.set(Calendar.MINUTE, 0); - cal.set(Calendar.SECOND, 0); - return cal.getTime(); - } - - /** - * return YYYY-MM-DD 23:59:59 - * - * @param inputDay - * @return - */ - public static Date getEndOfHour(Date inputDay) { - Calendar cal = Calendar.getInstance(); - cal.setTime(inputDay); - cal.set(Calendar.MINUTE, 59); - cal.set(Calendar.SECOND, 59); - return cal.getTime(); - } - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/DependentUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/DependentUtils.java deleted file mode 100644 index 0b4f566ae5..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/DependentUtils.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.enums.DependResult; -import cn.escheduler.common.enums.DependentRelation; -import cn.escheduler.common.model.DateInterval; -import cn.escheduler.common.utils.dependent.DependentDateUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -public class DependentUtils { - - private static final Logger logger = LoggerFactory.getLogger(DependentUtils.class); - - public static DependResult getDependResultForRelation(DependentRelation relation, - List dependResultList){ - - DependResult dependResult = DependResult.SUCCESS; - - switch (relation){ - case AND: - if(dependResultList.contains(DependResult.FAILED)){ - dependResult = DependResult.FAILED; - }else if(dependResultList.contains(DependResult.WAITING)){ - dependResult = DependResult.WAITING; - }else{ - dependResult = DependResult.SUCCESS; - } - break; - case OR: - if(dependResultList.contains(DependResult.SUCCESS)){ - dependResult = DependResult.SUCCESS; - }else if(dependResultList.contains(DependResult.WAITING)){ - dependResult = DependResult.WAITING; - }else{ - dependResult = DependResult.FAILED; - } - break; - default: - break; - } - return dependResult; - } - - - /** - * get date interval list by business date and date value. 
- * @param businessDate - * @param dateValue - * @return - */ - public static List getDateIntervalList(Date businessDate, String dateValue){ - List result = new ArrayList<>(); - switch (dateValue){ - case "last1Hour": - result = DependentDateUtils.getLastHoursInterval(businessDate, 1); - break; - case "last2Hours": - result = DependentDateUtils.getLastHoursInterval(businessDate, 2); - break; - case "last3Hours": - result = DependentDateUtils.getLastHoursInterval(businessDate, 3); - break; - case "today": - result = DependentDateUtils.getTodayInterval(businessDate); - break; - case "last1Days": - result = DependentDateUtils.getLastDayInterval(businessDate, 1); - break; - case "last2Days": - result = DependentDateUtils.getLastDayInterval(businessDate, 2); - break; - case "last3Days": - result = DependentDateUtils.getLastDayInterval(businessDate, 3); - break; - case "last7Days": - result = DependentDateUtils.getLastDayInterval(businessDate, 7); - break; - case "thisWeek": - result = DependentDateUtils.getThisWeekInterval(businessDate); - break; - case "lastWeek": - result = DependentDateUtils.getLastWeekInterval(businessDate); - break; - case "lastMonday": - result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 1); - break; - case "lastTuesday": - result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 2); - break; - case "lastWednesday": - result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 3); - break; - case "lastThursday": - result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 4); - break; - case "lastFriday": - result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 5); - break; - case "lastSaturday": - result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 6); - break; - case "lastSunday": - result = DependentDateUtils.getLastWeekOneDayInterval(businessDate, 7); - break; - case "thisMonth": - result = DependentDateUtils.getThisMonthInterval(businessDate); - break; - case 
"lastMonth": - result = DependentDateUtils.getLastMonthInterval(businessDate); - break; - case "lastMonthBegin": - result = DependentDateUtils.getLastMonthBeginInterval(businessDate, true); - break; - case "lastMonthEnd": - result = DependentDateUtils.getLastMonthBeginInterval(businessDate, false); - break; - default: - break; - } - return result; - } - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/EncryptionUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/EncryptionUtils.java deleted file mode 100644 index f246d6d421..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/EncryptionUtils.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.apache.commons.codec.digest.DigestUtils; -import org.apache.commons.lang3.StringUtils; - -/** - * encryption utils - */ -public class EncryptionUtils { - - - /** - * - * @param rawStr - * @return md5(rawStr) - */ - public static String getMd5(String rawStr) { - return DigestUtils.md5Hex(null == rawStr ? 
StringUtils.EMPTY : rawStr); - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/EnumFieldUtil.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/EnumFieldUtil.java deleted file mode 100644 index 27e1955fac..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/EnumFieldUtil.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -/** - * enum field util - */ -public class EnumFieldUtil { - /** - * Generate a string for the enums field - * - * @param field - * @param enumClass - * @return - */ - public static String genFieldStr(String field, Class enumClass) { - //TODO... 
- // delete this class when mybatisplus is ok - return ""; -// return "#{" + field + ",javaType=" + enumClass.getName() + ",typeHandler=" + EnumOrdinalTypeHandler.class.getName() + "}"; - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/FileUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/FileUtils.java deleted file mode 100644 index 205f894e04..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/FileUtils.java +++ /dev/null @@ -1,428 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.utils; - -import org.apache.commons.io.Charsets; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.nio.charset.Charset; -import java.nio.charset.UnsupportedCharsetException; - -import static cn.escheduler.common.Constants.*; -import static cn.escheduler.common.utils.PropertyUtils.getString; - -/** - * file utils - */ -public class FileUtils { - public static final Logger logger = LoggerFactory.getLogger(FileUtils.class); - - /** - * get file suffix - * - * @param filename - * @return file suffix - */ - public static String suffix(String filename) { - - String fileSuffix = ""; - if (StringUtils.isNotEmpty(filename)) { - int lastIndex = filename.lastIndexOf("."); - if (lastIndex > 0) { - fileSuffix = filename.substring(lastIndex + 1); - } - } - return fileSuffix; - } - - /** - * get download file absolute path and name - * - * @param filename - * @return download file name - */ - public static String getDownloadFilename(String filename) { - return String.format("%s/%s/%s", getString(DATA_DOWNLOAD_BASEDIR_PATH), DateUtils.getCurrentTime(YYYYMMDDHHMMSS), filename); - } - - /** - * get upload file absolute path and name - * - * @param tenantCode tenant code - * @param filename file name - * @return local file path - */ - public static String getUploadFilename(String tenantCode, String filename) { - return String.format("%s/%s/resources/%s",getString(DATA_BASEDIR_PATH), tenantCode, filename); - } - - /** - * directory of process execution - * @param projectId - * @param processDefineId - * @param processInstanceId - * @param taskInstanceId - * @return directory of process execution - */ - public static String getProcessExecDir(int projectId, int processDefineId, int processInstanceId, int taskInstanceId) { - - return String.format("%s/process/%s/%s/%s/%s", getString(PROCESS_EXEC_BASEPATH), 
Integer.toString(projectId), - Integer.toString(processDefineId), Integer.toString(processInstanceId),Integer.toString(taskInstanceId)); - } - - /** - * directory of process instances - * @param projectId - * @param processDefineId - * @param processInstanceId - * @return directory of process instances - */ - public static String getProcessExecDir(int projectId, int processDefineId, int processInstanceId) { - return String.format("%s/process/%s/%s/%s", getString(PROCESS_EXEC_BASEPATH), Integer.toString(projectId), - Integer.toString(processDefineId), Integer.toString(processInstanceId)); - } - - /** - * @return get suffixes for resource files that support online viewing - */ - public static String getResourceViewSuffixs() { - return getString(RESOURCE_VIEW_SUFFIXS); - } - - /** - * create directory and user - * @param execLocalPath - * @param userName - * @param logger - * @throws IOException - */ - public static void createWorkDirAndUserIfAbsent(String execLocalPath, String userName, Logger logger) throws IOException{ - //if work dir exists, first delete - File execLocalPathFile = new File(execLocalPath); - - if (execLocalPathFile.exists()){ - org.apache.commons.io.FileUtils.forceDelete(execLocalPathFile); - } - - //create work dir - org.apache.commons.io.FileUtils.forceMkdir(execLocalPathFile); - - - //if not exists this user,then create - if (!OSUtils.getUserList().contains(userName)){ - String userGroup = OSUtils.getGroup(); - if (org.apache.commons.lang3.StringUtils.isNotEmpty(userGroup)){ - logger.info("create os user : {}",userName); - String cmd = String.format("sudo useradd -g %s %s",userGroup,userName); - - logger.info("execute cmd : {}",cmd); - OSUtils.exeCmd(cmd); - } - } - - } - - - /** - * write content to file ,if parent path not exists, it will do one's utmost to mkdir - * - * @param content content - * @param filePath target file path - * @return - */ - public static boolean writeContent2File(String content, String filePath) { - boolean flag = 
true; - BufferedReader bufferedReader = null; - BufferedWriter bufferedWriter = null; - try { - File distFile = new File(filePath); - if (!distFile.getParentFile().exists()) { - distFile.getParentFile().mkdirs(); - } - bufferedReader = new BufferedReader(new StringReader(content)); - bufferedWriter = new BufferedWriter(new FileWriter(distFile)); - char buf[] = new char[1024]; - int len; - while ((len = bufferedReader.read(buf)) != -1) { - bufferedWriter.write(buf, 0, len); - } - bufferedWriter.flush(); - bufferedReader.close(); - bufferedWriter.close(); - } catch (IOException e) { - FileUtils.logger.error(e.getMessage(), e); - flag = false; - return flag; - } finally { - IOUtils.closeQuietly(bufferedWriter); - IOUtils.closeQuietly(bufferedReader); - } - return flag; - } - - /** - * Writes a String to a file creating the file if it does not exist. - * - * NOTE: As from v1.3, the parent directories of the file will be created - * if they do not exist. - * - * @param file the file to write - * @param data the content to write to the file - * @param encoding the encoding to use, {@code null} means platform default - * @throws IOException in case of an I/O error - * @throws java.io.UnsupportedEncodingException if the encoding is not supported by the VM - * @since 2.4 - */ - public static void writeStringToFile(File file, String data, Charset encoding) throws IOException { - writeStringToFile(file, data, encoding, false); - } - - /** - * Writes a String to a file creating the file if it does not exist. - * - * NOTE: As from v1.3, the parent directories of the file will be created - * if they do not exist. 
- * - * @param file the file to write - * @param data the content to write to the file - * @param encoding the encoding to use, {@code null} means platform default - * @throws IOException in case of an I/O error - * @throws java.io.UnsupportedEncodingException if the encoding is not supported by the VM - */ - public static void writeStringToFile(File file, String data, String encoding) throws IOException { - writeStringToFile(file, data, encoding, false); - } - - /** - * Writes a String to a file creating the file if it does not exist. - * - * @param file the file to write - * @param data the content to write to the file - * @param encoding the encoding to use, {@code null} means platform default - * @param append if {@code true}, then the String will be added to the - * end of the file rather than overwriting - * @throws IOException in case of an I/O error - * @since 2.3 - */ - public static void writeStringToFile(File file, String data, Charset encoding, boolean append) throws IOException { - OutputStream out = null; - try { - out = openOutputStream(file, append); - IOUtils.write(data, out, encoding); - out.close(); // don't swallow close Exception if copy completes normally - } finally { - IOUtils.closeQuietly(out); - } - } - - /** - * Writes a String to a file creating the file if it does not exist. 
- * - * @param file the file to write - * @param data the content to write to the file - * @param encoding the encoding to use, {@code null} means platform default - * @param append if {@code true}, then the String will be added to the - * end of the file rather than overwriting - * @throws IOException in case of an I/O error - * @throws UnsupportedCharsetException - * thrown instead of {@link UnsupportedEncodingException} in version 2.2 if the encoding is not - * supported by the VM - * @since 2.1 - */ - public static void writeStringToFile(File file, String data, String encoding, boolean append) throws IOException { - writeStringToFile(file, data, Charsets.toCharset(encoding), append); - } - - /** - * Writes a String to a file creating the file if it does not exist using the default encoding for the VM. - * - * @param file the file to write - * @param data the content to write to the file - * @throws IOException in case of an I/O error - */ - public static void writeStringToFile(File file, String data) throws IOException { - writeStringToFile(file, data, Charset.defaultCharset(), false); - } - - /** - * Writes a String to a file creating the file if it does not exist using the default encoding for the VM. - * - * @param file the file to write - * @param data the content to write to the file - * @param append if {@code true}, then the String will be added to the - * end of the file rather than overwriting - * @throws IOException in case of an I/O error - * @since 2.1 - */ - public static void writeStringToFile(File file, String data, boolean append) throws IOException { - writeStringToFile(file, data, Charset.defaultCharset(), append); - } - - /** - * Opens a {@link FileOutputStream} for the specified file, checking and - * creating the parent directory if it does not exist. - *

- * At the end of the method either the stream will be successfully opened, - * or an exception will have been thrown. - *

- * The parent directory will be created if it does not exist. - * The file will be created if it does not exist. - * An exception is thrown if the file object exists but is a directory. - * An exception is thrown if the file exists but cannot be written to. - * An exception is thrown if the parent directory cannot be created. - * - * @param file the file to open for output, must not be {@code null} - * @return a new {@link FileOutputStream} for the specified file - * @throws IOException if the file object is a directory - * @throws IOException if the file cannot be written to - * @throws IOException if a parent directory needs creating but that fails - * @since 1.3 - */ - public static FileOutputStream openOutputStream(File file) throws IOException { - return openOutputStream(file, false); - } - - /** - * Opens a {@link FileOutputStream} for the specified file, checking and - * creating the parent directory if it does not exist. - *

- * At the end of the method either the stream will be successfully opened, - * or an exception will have been thrown. - *

- * The parent directory will be created if it does not exist. - * The file will be created if it does not exist. - * An exception is thrown if the file object exists but is a directory. - * An exception is thrown if the file exists but cannot be written to. - * An exception is thrown if the parent directory cannot be created. - * - * @param file the file to open for output, must not be {@code null} - * @param append if {@code true}, then bytes will be added to the - * end of the file rather than overwriting - * @return a new {@link FileOutputStream} for the specified file - * @throws IOException if the file object is a directory - * @throws IOException if the file cannot be written to - * @throws IOException if a parent directory needs creating but that fails - * @since 2.1 - */ - public static FileOutputStream openOutputStream(File file, boolean append) throws IOException { - if (file.exists()) { - if (file.isDirectory()) { - throw new IOException("File '" + file + "' exists but is a directory"); - } - if (file.canWrite() == false) { - throw new IOException("File '" + file + "' cannot be written to"); - } - } else { - File parent = file.getParentFile(); - if (parent != null) { - if (!parent.mkdirs() && !parent.isDirectory()) { - throw new IOException("Directory '" + parent + "' could not be created"); - } - } - } - return new FileOutputStream(file, append); - } - - - /** - * deletes a directory recursively - * @param dir - */ - - public static void deleteDir(String dir) throws IOException { - org.apache.commons.io.FileUtils.deleteDirectory(new File(dir)); - } - - /** - * Deletes a file. If file is a directory, delete it and all sub-directories. - *

- * The difference between File.delete() and this method are: - *

    - *
  • A directory to be deleted does not have to be empty.
  • - *
  • You get exceptions when a file or directory cannot be deleted. - * (java.io.File methods returns a boolean)
  • - *
- * - * @param filename - * @throws IOException in case deletion is unsuccessful - */ - public static void deleteFile(String filename) throws IOException { - org.apache.commons.io.FileUtils.forceDelete(new File(filename)); - } - - /** - * Gets all the parent subdirectories of the parentDir directory - * @param parentDir - * @return - */ - public static File[] getAllDir(String parentDir){ - if(parentDir == null || "".equals(parentDir)) { - throw new RuntimeException("parentDir can not be empty"); - } - - File file = new File(parentDir); - if(!file.exists() || !file.isDirectory()) { - throw new RuntimeException("parentDir not exist, or is not a directory:"+parentDir); - } - - File[] schemaDirs = file.listFiles(new FileFilter() { - - @Override - public boolean accept(File pathname) { - if (pathname.isDirectory()) { - return true; - } - else { - return false; - } - } - }); - - return schemaDirs; - } - - /** - * Get Content - * @param inputStream - * @return - * @throws IOException - */ - public static String readFile2Str(InputStream inputStream) throws IOException{ - String all_content=null; - try { - all_content = new String(); - InputStream ins = inputStream; - ByteArrayOutputStream outputstream = new ByteArrayOutputStream(); - byte[] str_b = new byte[1024]; - int i = -1; - while ((i=ins.read(str_b)) > 0) { - outputstream.write(str_b,0,i); - } - all_content = outputstream.toString(); - return all_content; - } catch (Exception e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e); - } - } - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java deleted file mode 100644 index 6bc33eca42..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java +++ /dev/null @@ -1,585 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.enums.ResUploadType; -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONException; -import com.alibaba.fastjson.JSONObject; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.client.cli.RMAdminCLI; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.security.PrivilegedExceptionAction; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static cn.escheduler.common.Constants.*; -import static cn.escheduler.common.utils.PropertyUtils.*; -import static cn.escheduler.common.utils.PropertyUtils.getString; - -/** - * hadoop utils - * single instance - */ -public class HadoopUtils implements Closeable { - - private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class); - - private static String hdfsUser = 
PropertyUtils.getString(Constants.HDFS_ROOT_USER); - private static volatile HadoopUtils instance = new HadoopUtils(); - private static volatile Configuration configuration; - private static FileSystem fs; - - - private HadoopUtils(){ - if(StringUtils.isEmpty(hdfsUser)){ - hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER); - } - init(); - initHdfsPath(); - } - - public static HadoopUtils getInstance(){ - // if kerberos startup , renew HadoopUtils - if (CommonUtils.getKerberosStartupState()){ - return new HadoopUtils(); - } - return instance; - } - - /** - * init escheduler root path in hdfs - */ - private void initHdfsPath(){ - String hdfsPath = getString(Constants.DATA_STORE_2_HDFS_BASEPATH); - Path path = new Path(hdfsPath); - - try { - if (!fs.exists(path)) { - fs.mkdirs(path); - } - } catch (Exception e) { - logger.error(e.getMessage(),e); - } - } - - - /** - * init hadoop configuration - */ - private void init() { - if (configuration == null) { - synchronized (HadoopUtils.class) { - if (configuration == null) { - try { - configuration = new Configuration(); - - String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE); - ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); - - if (resUploadType == ResUploadType.HDFS){ - if (getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE)){ - System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, - getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)); - configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION,"kerberos"); - UserGroupInformation.setConfiguration(configuration); - UserGroupInformation.loginUserFromKeytab(getString(Constants.LOGIN_USER_KEY_TAB_USERNAME), - getString(Constants.LOGIN_USER_KEY_TAB_PATH)); - } - - String defaultFS = configuration.get(FS_DEFAULTFS); - //first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file - // the default is the local file system - if(defaultFS.startsWith("file")){ 
- String defaultFSProp = getString(FS_DEFAULTFS); - if(StringUtils.isNotBlank(defaultFSProp)){ - Map fsRelatedProps = getPrefixedProperties("fs."); - configuration.set(FS_DEFAULTFS,defaultFSProp); - fsRelatedProps.entrySet().stream().forEach(entry -> configuration.set(entry.getKey(), entry.getValue())); - }else{ - logger.error("property:{} can not to be empty, please set!"); - throw new RuntimeException("property:{} can not to be empty, please set!"); - } - }else{ - logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", FS_DEFAULTFS, defaultFS); - } - - if (fs == null) { - if(StringUtils.isNotEmpty(hdfsUser)){ - //UserGroupInformation ugi = UserGroupInformation.createProxyUser(hdfsUser,UserGroupInformation.getLoginUser()); - UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser); - ugi.doAs(new PrivilegedExceptionAction() { - @Override - public Boolean run() throws Exception { - fs = FileSystem.get(configuration); - return true; - } - }); - }else{ - logger.warn("hdfs.root.user is not set value!"); - fs = FileSystem.get(configuration); - } - } - }else if (resUploadType == ResUploadType.S3){ - configuration.set(FS_DEFAULTFS,getString(FS_DEFAULTFS)); - configuration.set(FS_S3A_ENDPOINT,getString(FS_S3A_ENDPOINT)); - configuration.set(FS_S3A_ACCESS_KEY,getString(FS_S3A_ACCESS_KEY)); - configuration.set(FS_S3A_SECRET_KEY,getString(FS_S3A_SECRET_KEY)); - fs = FileSystem.get(configuration); - } - - - String rmHaIds = getString(YARN_RESOURCEMANAGER_HA_RM_IDS); - String appAddress = getString(Constants.YARN_APPLICATION_STATUS_ADDRESS); - if (!StringUtils.isEmpty(rmHaIds)) { - appAddress = getAppAddress(appAddress, rmHaIds); - logger.info("appAddress : {}", appAddress); - } - configuration.set(Constants.YARN_APPLICATION_STATUS_ADDRESS, appAddress); - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - - } - } - } - } - - /** - * @return Configuration - */ - public Configuration getConfiguration() { - return configuration; - } 
- - /** - * get application url - * - * @param applicationId - * @return - */ - public String getApplicationUrl(String applicationId) { - return String.format(configuration.get(YARN_APPLICATION_STATUS_ADDRESS), applicationId); - } - - /** - * cat file on hdfs - * - * @param hdfsFilePath hdfs file path - * @return byte[] - */ - public byte[] catFile(String hdfsFilePath) throws IOException { - - if(StringUtils.isBlank(hdfsFilePath)){ - logger.error("hdfs file path:{} is blank",hdfsFilePath); - return null; - } - - FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath)); - return IOUtils.toByteArray(fsDataInputStream); - } - - - - /** - * cat file on hdfs - * - * @param hdfsFilePath hdfs file path - * @param skipLineNums skip line numbers - * @param limit read how many lines - * @return - */ - public List catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException { - - if (StringUtils.isBlank(hdfsFilePath)){ - logger.error("hdfs file path:{} is blank",hdfsFilePath); - return null; - } - - FSDataInputStream in = fs.open(new Path(hdfsFilePath)); - BufferedReader br = new BufferedReader(new InputStreamReader(in)); - Stream stream = br.lines().skip(skipLineNums).limit(limit); - return stream.collect(Collectors.toList()); - } - - /** - * make the given file and all non-existent parents into - * directories. Has the semantics of Unix 'mkdir -p'. - * Existence of the directory hierarchy is not an error. 
- * - * @param hdfsPath path to create - */ - public boolean mkdir(String hdfsPath) throws IOException { - return fs.mkdirs(new Path(hdfsPath)); - } - - /** - * copy files between FileSystems - * - * @param srcPath source hdfs path - * @param dstPath destination hdfs path - * @param deleteSource whether to delete the src - * @param overwrite whether to overwrite an existing file - * @return 是否成功 - */ - public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException { - return FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf()); - } - - /** - * the src file is on the local disk. Add it to FS at - * the given dst name. - - * @param srcFile local file - * @param dstHdfsPath destination hdfs path - * @param deleteSource whether to delete the src - * @param overwrite whether to overwrite an existing file - */ - public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException { - Path srcPath = new Path(srcFile); - Path dstPath= new Path(dstHdfsPath); - - fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath); - - return true; - } - - /** - * copy hdfs file to local - * - * @param srcHdfsFilePath source hdfs file path - * @param dstFile destination file - * @param deleteSource delete source - * @param overwrite overwrite - * @return - * @throws IOException - */ - public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean deleteSource, boolean overwrite) throws IOException { - Path srcPath = new Path(srcHdfsFilePath); - File dstPath = new File(dstFile); - - if (dstPath.exists()) { - if (dstPath.isFile()) { - if (overwrite) { - dstPath.delete(); - } - } else { - logger.error("destination file must be a file"); - } - } - - if(!dstPath.getParentFile().exists()){ - dstPath.getParentFile().mkdirs(); - } - - return FileUtil.copy(fs, srcPath, dstPath, deleteSource, fs.getConf()); - } - - /** - * 
- * delete a file - * - * @param hdfsFilePath the path to delete. - * @param recursive if path is a directory and set to - * true, the directory is deleted else throws an exception. In - * case of a file the recursive can be set to either true or false. - * @return true if delete is successful else false. - * @throws IOException - */ - public boolean delete(String hdfsFilePath, boolean recursive) throws IOException { - return fs.delete(new Path(hdfsFilePath), recursive); - } - - /** - * check if exists - * - * @param hdfsFilePath source file path - * @return - */ - public boolean exists(String hdfsFilePath) throws IOException { - return fs.exists(new Path(hdfsFilePath)); - } - - /** - * Gets a list of files in the directory - * - * @param filePath - * @return {@link FileStatus} - */ - public FileStatus[] listFileStatus(String filePath)throws Exception{ - Path path = new Path(filePath); - try { - return fs.listStatus(new Path(filePath)); - } catch (IOException e) { - logger.error("Get file list exception", e); - throw new Exception("Get file list exception", e); - } - } - - /** - * Renames Path src to Path dst. Can take place on local fs - * or remote DFS. 
- * @param src path to be renamed - * @param dst new path after rename - * @throws IOException on failure - * @return true if rename is successful - */ - public boolean rename(String src, String dst) throws IOException { - return fs.rename(new Path(src), new Path(dst)); - } - - - /** - * get the state of an application - * - * @param applicationId - * @return the return may be null or there may be other parse exceptions - * @throws JSONException - * @throws IOException - */ - public ExecutionStatus getApplicationStatus(String applicationId) throws JSONException { - if (StringUtils.isEmpty(applicationId)) { - return null; - } - - String applicationUrl = getApplicationUrl(applicationId); - - String responseContent = HttpUtils.get(applicationUrl); - - JSONObject jsonObject = JSONObject.parseObject(responseContent); - String result = jsonObject.getJSONObject("app").getString("finalStatus"); - - switch (result) { - case ACCEPTED: - return ExecutionStatus.SUBMITTED_SUCCESS; - case SUCCEEDED: - return ExecutionStatus.SUCCESS; - case NEW: - case NEW_SAVING: - case SUBMITTED: - case FAILED: - return ExecutionStatus.FAILURE; - case KILLED: - return ExecutionStatus.KILL; - - case RUNNING: - default: - return ExecutionStatus.RUNNING_EXEUTION; - } - } - - /** - * - * @return data hdfs path - */ - public static String getHdfsDataBasePath() { - String basePath = getString(DATA_STORE_2_HDFS_BASEPATH); - if ("/".equals(basePath)) { - // if basepath is configured to /, the generated url may be //default/resources (with extra leading /) - return ""; - } else { - return basePath; - } - } - - /** - * hdfs resource dir - * - * @param tenantCode tenant code - * @return hdfs resource dir - */ - public static String getHdfsResDir(String tenantCode) { - return String.format("%s/resources", getHdfsTenantDir(tenantCode)); - } - - /** - * hdfs user dir - * - * @param tenantCode tenant code - * @return hdfs resource dir - */ - public static String getHdfsUserDir(String tenantCode,int userId) { 
- return String.format("%s/home/%d", getHdfsTenantDir(tenantCode),userId); - } - - /** - * hdfs udf dir - * - * @param tenantCode tenant code - * @return get udf dir on hdfs - */ - public static String getHdfsUdfDir(String tenantCode) { - return String.format("%s/udfs", getHdfsTenantDir(tenantCode)); - } - - /** - * get absolute path and name for file on hdfs - * - * @param tenantCode tenant code - * @param filename file name - * @return get absolute path and name for file on hdfs - */ - public static String getHdfsFilename(String tenantCode, String filename) { - return String.format("%s/%s", getHdfsResDir(tenantCode), filename); - } - - /** - * get absolute path and name for udf file on hdfs - * - * @param tenantCode tenant code - * @param filename file name - * @return get absolute path and name for udf file on hdfs - */ - public static String getHdfsUdfFilename(String tenantCode, String filename) { - return String.format("%s/%s", getHdfsUdfDir(tenantCode), filename); - } - - /** - * @return file directory of tenants on hdfs - */ - public static String getHdfsTenantDir(String tenantCode) { - return String.format("%s/%s", getHdfsDataBasePath(), tenantCode); - } - - - /** - * getAppAddress - * - * @param appAddress - * @param rmHa - * @return - */ - public static String getAppAddress(String appAddress, String rmHa) { - - //get active ResourceManager - String activeRM = YarnHAAdminUtils.getAcitveRMName(rmHa); - - String[] split1 = appAddress.split(DOUBLE_SLASH); - - if (split1.length != 2) { - return null; - } - - String start = split1[0] + DOUBLE_SLASH; - String[] split2 = split1[1].split(COLON); - - if (split2.length != 2) { - return null; - } - - String end = COLON + split2[1]; - - return start + activeRM + end; - } - - - @Override - public void close() throws IOException { - if (fs != null) { - try { - fs.close(); - } catch (IOException e) { - logger.error("Close HadoopUtils instance failed", e); - throw new IOException("Close HadoopUtils instance failed", e); - 
} - } - } - - - /** - * yarn ha admin utils - */ - private static final class YarnHAAdminUtils extends RMAdminCLI { - - private static final Logger logger = LoggerFactory.getLogger(YarnHAAdminUtils.class); - - /** - * get active resourcemanager - * - * @param rmIds - * @return - */ - public static String getAcitveRMName(String rmIds) { - - String[] rmIdArr = rmIds.split(COMMA); - - int activeResourceManagerPort = getInt(HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT, 8088); - - String yarnUrl = "http://%s:" + activeResourceManagerPort + "/ws/v1/cluster/info"; - - String state = null; - try { - /** - * send http get request to rm1 - */ - state = getRMState(String.format(yarnUrl, rmIdArr[0])); - - if (HADOOP_RM_STATE_ACTIVE.equals(state)) { - return rmIdArr[0]; - } else if (HADOOP_RM_STATE_STANDBY.equals(state)) { - state = getRMState(String.format(yarnUrl, rmIdArr[1])); - if (HADOOP_RM_STATE_ACTIVE.equals(state)) { - return rmIdArr[1]; - } - } else { - return null; - } - } catch (Exception e) { - state = getRMState(String.format(yarnUrl, rmIdArr[1])); - if (HADOOP_RM_STATE_ACTIVE.equals(state)) { - return rmIdArr[0]; - } - } - return null; - } - - - /** - * get ResourceManager state - * - * @param url - * @return - */ - public static String getRMState(String url) { - - String retStr = HttpUtils.get(url); - - if (StringUtils.isEmpty(retStr)) { - return null; - } - //to json - JSONObject jsonObject = JSON.parseObject(retStr); - - //get ResourceManager state - String state = jsonObject.getJSONObject("clusterInfo").getString("haState"); - return state; - } - - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/HttpUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/HttpUtils.java deleted file mode 100644 index d0e3ccf9ee..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/HttpUtils.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * 
contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.Constants; -import org.apache.http.HttpEntity; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.util.EntityUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; - -/** - * http utils - */ -public class HttpUtils { - - - public static final Logger logger = LoggerFactory.getLogger(HttpUtils.class); - - /** - * get http request content - * @param url - * @return http response - */ - public static String get(String url){ - CloseableHttpClient httpclient = HttpClients.createDefault(); - - HttpGet httpget = new HttpGet(url); - /** set timeout、request time、socket timeout */ - RequestConfig requestConfig = RequestConfig.custom().setConnectTimeout(Constants.HTTP_CONNECT_TIMEOUT) - .setConnectionRequestTimeout(Constants.HTTP_CONNECTION_REQUEST_TIMEOUT) - .setSocketTimeout(Constants.SOCKET_TIMEOUT) - .setRedirectsEnabled(true) - .build(); - httpget.setConfig(requestConfig); - String responseContent = null; - 
CloseableHttpResponse response = null; - - try { - response = httpclient.execute(httpget); - //check response status is 200 - if (response.getStatusLine().getStatusCode() == 200) { - HttpEntity entity = response.getEntity(); - if (entity != null) { - responseContent = EntityUtils.toString(entity, Constants.UTF_8); - }else{ - logger.warn("http entity is null"); - } - }else{ - logger.error("htt get:{} response status code is not 200!"); - } - }catch (Exception e){ - logger.error(e.getMessage(),e); - }finally { - try { - if (response != null) { - EntityUtils.consume(response.getEntity()); - response.close(); - } - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - - if (httpget != null && !httpget.isAborted()) { - httpget.releaseConnection(); - httpget.abort(); - } - - if (httpclient != null) { - try { - httpclient.close(); - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - } - } - return responseContent; - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/IpUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/IpUtils.java deleted file mode 100644 index 972135ae89..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/IpUtils.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - -/** - * http utils - */ -public class IpUtils { - - private static final Logger logger = LoggerFactory.getLogger(IpUtils.class); - public static final String DOT = "."; - - /** - * ip str to long

- * - * @param ipStr ip string - */ - public static Long ipToLong(String ipStr) { - String[] ipSet = ipStr.split("\\" + DOT); - - return Long.parseLong(ipSet[0]) << 24 | Long.parseLong(ipSet[1]) << 16 | Long.parseLong(ipSet[2]) << 8 | Long.parseLong(ipSet[3]); - } - - /** - * long to ip - * @param ipLong the long number converted from IP - * @return String - */ - public static String longToIp(long ipLong) { - long[] ipNumbers = new long[4]; - long tmp = 0xFF; - ipNumbers[0] = ipLong >> 24 & tmp; - ipNumbers[1] = ipLong >> 16 & tmp; - ipNumbers[2] = ipLong >> 8 & tmp; - ipNumbers[3] = ipLong & tmp; - - StringBuilder sb = new StringBuilder(16); - sb.append(ipNumbers[0]).append(DOT) - .append(ipNumbers[1]).append(DOT) - .append(ipNumbers[2]).append(DOT) - .append(ipNumbers[3]); - return sb.toString(); - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/JSONUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/JSONUtils.java deleted file mode 100644 index f48c7306aa..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/JSONUtils.java +++ /dev/null @@ -1,271 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.utils; - -import com.alibaba.fastjson.JSONArray; -import com.alibaba.fastjson.JSONObject; -import com.alibaba.fastjson.TypeReference; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.databind.*; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.*; - -/** - * json utils - */ -public class JSONUtils { - - private static final Logger logger = LoggerFactory.getLogger(JSONUtils.class); - - /** - * can use static singleton, inject: just make sure to reuse! - */ - private static final ObjectMapper objectMapper = new ObjectMapper(); - - /** - * init - */ - private static final JSONUtils instance = new JSONUtils(); - - - private JSONUtils() { - //Feature that determines whether encountering of unknown properties, false means not analyzer unknown properties - objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false).setTimeZone(TimeZone.getDefault()); - } - - /** - * json representation of object - * @param object - * @return object to json string - */ - public static String toJson(Object object) { - try{ - return JSONObject.toJSONString(object,false); - } catch (Exception e) { - logger.error("object to json exception!",e); - } - - return null; - } - - - /** - * - * This method deserializes the specified Json into an object of the specified class. It is not - * suitable to use if the specified class is a generic type since it will not have the generic - * type information because of the Type Erasure feature of Java. Therefore, this method should not - * be used if the desired type is a generic type. Note that this method works fine if the any of - * the fields of the specified object are generics, just the object itself should not be a - * generic type. 
- * - * @param json the string from which the object is to be deserialized - * @param clazz the class of T - * @return an object of type T from the string - * classOfT - */ - public static T parseObject(String json, Class clazz) { - if (StringUtils.isEmpty(json)) { - return null; - } - - try { - return JSONObject.parseObject(json, clazz); - } catch (Exception e) { - logger.error("parse object exception!",e); - } - return null; - } - - - /** - * json to list - * - * @param json - * @param clazz c - * @param - * @return - */ - public static List toList(String json, Class clazz) { - if (StringUtils.isEmpty(json)) { - return new ArrayList<>(); - } - try { - return JSONArray.parseArray(json, clazz); - } catch (Exception e) { - logger.error("JSONArray.parseArray exception!",e); - } - - return new ArrayList<>(); - } - - - - /** - * check json object valid - * - * @param json - * @return - */ - public static boolean checkJsonVaild(String json) { - - if (StringUtils.isEmpty(json)) { - return false; - } - - try { - objectMapper.readTree(json); - return true; - } catch (IOException e) { - logger.error("check json object valid exception!",e); - } - - return false; - } - - - /** - * Method for finding a JSON Object field with specified name in this - * node or its child nodes, and returning value it has. - * If no matching field is found in this node or its descendants, returns null. 
- * - * @param fieldName Name of field to look for - * - * @return Value of first matching node found, if any; null if none - */ - public static String findValue(JsonNode jsonNode, String fieldName) { - JsonNode node = jsonNode.findValue(fieldName); - - if (node == null) { - return null; - } - - return node.toString(); - } - - - /** - * json to map - * - * {@link #toMap(String, Class, Class)} - * - * @param json - * @return - */ - public static Map toMap(String json) { - if (StringUtils.isEmpty(json)) { - return null; - } - - try { - return JSONObject.parseObject(json, new TypeReference>(){}); - } catch (Exception e) { - logger.error("json to map exception!",e); - } - - return null; - } - - /** - * - * json to map - * - *

-   *         String jsonStr = "{\"id\":\"1001\",\"name\":\"Jobs\"}";
-   *         Map models = JSONUtils.toMap(jsonStr, String.class,String.class);
-   * 
- * @param json - * @param classK - * @param classV - * @param - * @param - * @return - */ - public static Map toMap(String json, Class classK, Class classV) { - if (StringUtils.isEmpty(json)) { - return null; - } - - try { - return JSONObject.parseObject(json, new TypeReference>() {}); - } catch (Exception e) { - logger.error("json to map exception!",e); - } - - return null; - } - - /** - * 对象装json字符串

- * - * @return json string - */ - public static String toJsonString(Object object) { - try{ - return JSONObject.toJSONString(object,false); - } catch (Exception e) { - throw new RuntimeException("Json deserialization exception.", e); - } - } - - public static JSONObject parseObject(String text) { - try{ - return JSONObject.parseObject(text); - } catch (Exception e) { - throw new RuntimeException("Json deserialization exception.", e); - } - } - - public static JSONArray parseArray(String text) { - try{ - return JSONObject.parseArray(text); - } catch (Exception e) { - throw new RuntimeException("Json deserialization exception.", e); - } - } - - - - /** - * json serializer - */ - public static class JsonDataSerializer extends JsonSerializer { - - @Override - public void serialize(String value, JsonGenerator gen, SerializerProvider provider) throws IOException { - gen.writeRawValue(value); - } - - } - - /** - * json data deserializer - */ - public static class JsonDataDeserializer extends JsonDeserializer { - - @Override - public String deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { - JsonNode node = p.getCodec().readTree(p); - return node.toString(); - } - - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/OSUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/OSUtils.java deleted file mode 100644 index 0cf06d3b02..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/OSUtils.java +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.shell.ShellExecutor; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import oshi.SystemInfo; -import oshi.hardware.CentralProcessor; -import oshi.hardware.GlobalMemory; -import oshi.hardware.HardwareAbstractionLayer; - -import java.io.BufferedReader; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.lang.management.ManagementFactory; -import java.lang.management.RuntimeMXBean; -import java.math.RoundingMode; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.text.DecimalFormat; -import java.util.ArrayList; -import java.util.List; - -/** - * os utils - * - */ -public class OSUtils { - - private static final Logger logger = LoggerFactory.getLogger(OSUtils.class); - - private static final SystemInfo SI = new SystemInfo(); - public static final String TWO_DECIMAL = "0.00"; - - private static HardwareAbstractionLayer hal = SI.getHardware(); - - private OSUtils() {} - - - /** - * get memory usage - * Keep 2 decimal - * @return percent % - */ - public static double memoryUsage() { - GlobalMemory memory = hal.getMemory(); - double memoryUsage = (memory.getTotal() - memory.getAvailable() - memory.getSwapUsed()) * 0.1 / memory.getTotal() * 10; - - DecimalFormat df = new DecimalFormat(TWO_DECIMAL); - df.setRoundingMode(RoundingMode.HALF_UP); - return 
Double.parseDouble(df.format(memoryUsage)); - } - - - /** - * get available physical memory size - * - * Keep 2 decimal - * @return available Physical Memory Size, unit: G - */ - public static double availablePhysicalMemorySize() { - GlobalMemory memory = hal.getMemory(); - double availablePhysicalMemorySize = (memory.getAvailable() + memory.getSwapUsed()) /1024.0/1024/1024; - - DecimalFormat df = new DecimalFormat(TWO_DECIMAL); - df.setRoundingMode(RoundingMode.HALF_UP); - return Double.parseDouble(df.format(availablePhysicalMemorySize)); - - } - - /** - * get total physical memory size - * - * Keep 2 decimal - * @return available Physical Memory Size, unit: G - */ - public static double totalMemorySize() { - GlobalMemory memory = hal.getMemory(); - double availablePhysicalMemorySize = memory.getTotal() /1024.0/1024/1024; - - DecimalFormat df = new DecimalFormat(TWO_DECIMAL); - df.setRoundingMode(RoundingMode.HALF_UP); - return Double.parseDouble(df.format(availablePhysicalMemorySize)); - } - - - /** - * load average - * - * @return - */ - public static double loadAverage() { - double loadAverage = hal.getProcessor().getSystemLoadAverage(); - - DecimalFormat df = new DecimalFormat(TWO_DECIMAL); - - df.setRoundingMode(RoundingMode.HALF_UP); - return Double.parseDouble(df.format(loadAverage)); - } - - /** - * get cpu usage - * - * @return - */ - public static double cpuUsage() { - CentralProcessor processor = hal.getProcessor(); - double cpuUsage = processor.getSystemCpuLoad(); - - DecimalFormat df = new DecimalFormat(TWO_DECIMAL); - df.setRoundingMode(RoundingMode.HALF_UP); - - return Double.parseDouble(df.format(cpuUsage)); - } - - - /** - * get user list - * - * @return - */ - public static List getUserList() { - List userList = new ArrayList<>(); - BufferedReader bufferedReader = null; - - try { - bufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream("/etc/passwd"))); - String line; - - while ((line = bufferedReader.readLine()) != null) { 
- if (line.contains(":")) { - String[] userInfo = line.split(":"); - userList.add(userInfo[0]); - } - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - } finally { - try { - bufferedReader.close(); - } catch (IOException e) { - logger.error(e.getMessage(), e); - } - } - - return userList; - } - - /** - * get system group information - * @return - * @throws IOException - */ - public static String getGroup() throws IOException { - String result = exeCmd("groups"); - - if (StringUtils.isNotEmpty(result)) { - String[] groupInfo = StringUtils.split(result); - return groupInfo[0]; - } - - return null; - } - - /** - * Execute the corresponding command of Linux or Windows - * - * @param command - * @return - * @throws IOException - */ - public static String exeCmd(String command) throws IOException { - BufferedReader br = null; - - try { - Process p = Runtime.getRuntime().exec(command); - br = new BufferedReader(new InputStreamReader(p.getInputStream())); - String line; - StringBuilder sb = new StringBuilder(); - - while ((line = br.readLine()) != null) { - sb.append(line + "\n"); - } - - return sb.toString(); - } finally { - if (br != null) { - try { - br.close(); - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - } - } - } - - /** - * Execute the shell - * @param command - * @return - * @throws IOException - */ - public static String exeShell(String command) throws IOException { - return ShellExecutor.execCommand(command); - } - - /** - * get process id - * @return - */ - public static int getProcessID() { - RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean(); - return Integer.parseInt(runtimeMXBean.getName().split("@")[0]); - } - - /** - * get local host - * @return - */ - public static String getHost(){ - try { - return InetAddress.getLocalHost().getHostAddress(); - } catch (UnknownHostException e) { - logger.error(e.getMessage(),e); - } - return null; - } - - - /** - * whether is macOS - */ - public static boolean 
isMacOS() { - String os = System.getProperty("os.name"); - return os.startsWith("Mac"); - } - - - /** - * whether is windows - */ - public static boolean isWindows() { - String os = System.getProperty("os.name"); - return os.startsWith("Windows"); - } - - - /** - * check memory and cpu usage - * @param conf - * @return - */ - public static Boolean checkResource(Configuration conf, Boolean isMaster){ - double systemCpuLoad; - double systemReservedMemory; - - if(isMaster){ - systemCpuLoad = conf.getDouble(Constants.MASTER_MAX_CPULOAD_AVG, Constants.defaultMasterCpuLoad); - systemReservedMemory = conf.getDouble(Constants.MASTER_RESERVED_MEMORY, Constants.defaultMasterReservedMemory); - }else{ - systemCpuLoad = conf.getDouble(Constants.WORKER_MAX_CPULOAD_AVG, Constants.defaultWorkerCpuLoad); - systemReservedMemory = conf.getDouble(Constants.WORKER_RESERVED_MEMORY, Constants.defaultWorkerReservedMemory); - } - - // judging usage - double loadAverage = OSUtils.loadAverage(); - // - double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); - - if(loadAverage > systemCpuLoad || availablePhysicalMemorySize < systemReservedMemory){ - logger.warn("load or availablePhysicalMemorySize(G) is too high, it's availablePhysicalMemorySize(G):{},loadAvg:{}", availablePhysicalMemorySize , loadAverage); - return false; - }else{ - return true; - } - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/ParameterUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/ParameterUtils.java deleted file mode 100644 index 50f4040ee6..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/ParameterUtils.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.common.enums.DataType; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.utils.placeholder.BusinessTimeUtils; -import cn.escheduler.common.utils.placeholder.PlaceholderUtils; -import cn.escheduler.common.utils.placeholder.TimePlaceholderUtils; -import com.alibaba.fastjson.JSONObject; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.time.DateUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.PreparedStatement; -import java.text.ParseException; -import java.util.*; - -/** - * parameter parse utils - */ -public class ParameterUtils { - - private static final Logger logger = LoggerFactory.getLogger(ParameterUtils.class); - - /** - * convert parameters place holders - * - * @param parameterString - * @param parameterMap - * @return - */ - public static String convertParameterPlaceholders(String parameterString, Map parameterMap) { - if (StringUtils.isEmpty(parameterString)) { - return parameterString; - } - - //Get current time, schedule execute time - String cronTimeStr = parameterMap.get(Constants.PARAMETER_DATETIME); - - Date cronTime = null; - - if (StringUtils.isNotEmpty(cronTimeStr)) { - try { - cronTime = DateUtils.parseDate(cronTimeStr, new 
String[]{Constants.PARAMETER_FORMAT_TIME}); - } catch (ParseException e) { - logger.error(String.format("parse %s exception", cronTimeStr), e); - } - } else { - cronTime = new Date(); - } - - // replace variable ${} form,refers to the replacement of system variables and custom variables - parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true); - - // replace time $[...] form, eg. $[yyyyMMdd] - if (cronTime != null) { - parameterString = TimePlaceholderUtils.replacePlaceholders(parameterString, cronTime, true); - } - - return parameterString; - } - - /** - * set in parameter - * @param index - * @param stmt - * @param dataType - * @param value - * @throws Exception - */ - public static void setInParameter(int index, PreparedStatement stmt, DataType dataType, String value)throws Exception{ - if (dataType.equals(DataType.VARCHAR)){ - stmt.setString(index,value); - }else if (dataType.equals(DataType.INTEGER)){ - stmt.setInt(index, Integer.parseInt(value)); - }else if (dataType.equals(DataType.LONG)){ - stmt.setLong(index, Long.parseLong(value)); - }else if (dataType.equals(DataType.FLOAT)){ - stmt.setFloat(index, Float.parseFloat(value)); - }else if (dataType.equals(DataType.DOUBLE)){ - stmt.setDouble(index, Double.parseDouble(value)); - }else if (dataType.equals(DataType.DATE)){ - stmt.setString(index, value); - }else if (dataType.equals(DataType.TIME)){ - stmt.setString(index, value); - }else if (dataType.equals(DataType.TIMESTAMP)){ - stmt.setString(index, value); - }else if (dataType.equals(DataType.BOOLEAN)){ - stmt.setBoolean(index,Boolean.parseBoolean(value)); - } - } - - /** - * curing user define parameters - * - * @return - */ - public static String curingGlobalParams(Map globalParamMap, List globalParamList, - CommandType commandType, Date scheduleTime){ - Map globalMap = new HashMap<>(); - if(globalParamMap!= null){ - globalMap.putAll(globalParamMap); - } - Map allParamMap = new HashMap<>(); - //如果是补数,需要传入一个补数时间,根据任务类型 - 
Map timeParams = BusinessTimeUtils - .getBusinessTime(commandType,scheduleTime); - - if (timeParams != null) { - allParamMap.putAll(timeParams); - } - - if (globalMap != null) { - allParamMap.putAll(globalMap); - } - - Set> entries = allParamMap.entrySet(); - - Map resolveMap = new HashMap<>(); - for (Map.Entry entry : entries){ - String val = entry.getValue(); - if (val.startsWith("$")){ - String str = ParameterUtils.convertParameterPlaceholders(val, allParamMap); - resolveMap.put(entry.getKey(),str); - } - } - - if (globalMap != null){ - globalMap.putAll(resolveMap); - } - - if (globalParamList != null && globalParamList.size() > 0){ - - for (Property property : globalParamList){ - String val = globalMap.get(property.getProp()); - if (val != null){ - property.setValue(val); - } - } - return JSONObject.toJSONString(globalParamList); - } - return null; - } - - - /** - * handle escapes - * @param inputString - * @return - */ - public static String handleEscapes(String inputString){ - - if(StringUtils.isNotEmpty(inputString)){ - return inputString.replace("%", "////%"); - } - return inputString; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/PropertyUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/PropertyUtils.java deleted file mode 100644 index 475cbfb72e..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/PropertyUtils.java +++ /dev/null @@ -1,215 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ResUploadType; -import org.apache.commons.io.IOUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -import static cn.escheduler.common.Constants.COMMON_PROPERTIES_PATH; -import static cn.escheduler.common.Constants.HADOOP_PROPERTIES_PATH; - -/** - * property utils - * single instance - */ -public class PropertyUtils { - - /** - * logger - */ - private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class); - - private static final Properties properties = new Properties(); - - private static final PropertyUtils propertyUtils = new PropertyUtils(); - - private PropertyUtils(){ - init(); - } - - private void init(){ - String[] propertyFiles = new String[]{HADOOP_PROPERTIES_PATH,COMMON_PROPERTIES_PATH}; - for (String fileName : propertyFiles) { - InputStream fis = null; - try { - fis = PropertyUtils.class.getResourceAsStream(fileName); - properties.load(fis); - - } catch (IOException e) { - logger.error(e.getMessage(), e); - System.exit(1); - } finally { - IOUtils.closeQuietly(fis); - } - } - } - - /** - * judge whether resource upload startup - * @return - */ - public static Boolean getResUploadStartupState(){ - String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE); - ResUploadType resUploadType = 
ResUploadType.valueOf(resUploadStartupType); - return resUploadType == ResUploadType.HDFS || resUploadType == ResUploadType.S3; - } - - /** - * get property value - * - * @param key property name - * @return - */ - public static String getString(String key) { - return properties.getProperty(key.trim()); - } - - /** - * get property value - * - * @param key property name - * @return get property int value , if key == null, then return -1 - */ - public static int getInt(String key) { - return getInt(key, -1); - } - - /** - * - * @param key - * @param defaultValue - * @return - */ - public static int getInt(String key, int defaultValue) { - String value = getString(key); - if (value == null) { - return defaultValue; - } - - try { - return Integer.parseInt(value); - } catch (NumberFormatException e) { - logger.info(e.getMessage(),e); - } - return defaultValue; - } - - /** - * get property value - * - * @param key property name - * @return - */ - public static Boolean getBoolean(String key) { - String value = properties.getProperty(key.trim()); - if(null != value){ - return Boolean.parseBoolean(value); - } - - return null; - } - - /** - * get property long value - * @param key - * @param defaultVal - * @return - */ - public static long getLong(String key, long defaultVal) { - String val = getString(key); - return val == null ? defaultVal : Long.parseLong(val); - } - - /** - * - * @param key - * @return - */ - public static long getLong(String key) { - return getLong(key,-1); - } - - /** - * - * @param key - * @param defaultVal - * @return - */ - public double getDouble(String key, double defaultVal) { - String val = getString(key); - return val == null ? 
defaultVal : Double.parseDouble(val); - } - - - /** - * get array - * @param key property name - * @param splitStr separator - * @return - */ - public static String[] getArray(String key, String splitStr) { - String value = getString(key); - if (value == null) { - return null; - } - try { - String[] propertyArray = value.split(splitStr); - return propertyArray; - } catch (NumberFormatException e) { - logger.info(e.getMessage(),e); - } - return null; - } - - /** - * - * @param key - * @param type - * @param defaultValue - * @param - * @return get enum value - */ - public > T getEnum(String key, Class type, - T defaultValue) { - String val = getString(key); - return val == null ? defaultValue : Enum.valueOf(type, val); - } - - /** - * get all properties with specified prefix, like: fs. - * @param prefix prefix to search - * @return - */ - public static Map getPrefixedProperties(String prefix) { - Map matchedProperties = new HashMap<>(); - for (String propName : properties.stringPropertyNames()) { - if (propName.startsWith(prefix)) { - matchedProperties.put(propName, properties.getProperty(propName)); - } - } - return matchedProperties; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/ResInfo.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/ResInfo.java deleted file mode 100644 index 6a48d6bc89..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/ResInfo.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.model.MasterServer; - -import java.util.Date; - -/** - * heartbeat for ZK reigster res info - */ -public class ResInfo { - - /** - * cpuUsage - */ - private double cpuUsage; - - /** - * memoryUsage - */ - private double memoryUsage; - - /** - * loadAverage - */ - private double loadAverage; - - public ResInfo(){} - - public ResInfo(double cpuUsage , double memoryUsage){ - this.cpuUsage = cpuUsage ; - this.memoryUsage = memoryUsage; - } - - public ResInfo(double cpuUsage, double memoryUsage, double loadAverage) { - this.cpuUsage = cpuUsage; - this.memoryUsage = memoryUsage; - this.loadAverage = loadAverage; - } - - public double getCpuUsage() { - return cpuUsage; - } - - public void setCpuUsage(double cpuUsage) { - this.cpuUsage = cpuUsage; - } - - public double getMemoryUsage() { - return memoryUsage; - } - - public void setMemoryUsage(double memoryUsage) { - this.memoryUsage = memoryUsage; - } - - public double getLoadAverage() { - return loadAverage; - } - - public void setLoadAverage(double loadAverage) { - this.loadAverage = loadAverage; - } - - /** - * get CPU and memory usage - * add cpu load average by lidong for service monitor - * @return - */ - public static String getResInfoJson(){ - ResInfo resInfo = new ResInfo(OSUtils.cpuUsage(), OSUtils.memoryUsage(),OSUtils.loadAverage()); - return JSONUtils.toJson(resInfo); - } - - - /** - * get CPU and memory usage - * @return - */ - public static String getResInfoJson(double cpuUsage , 
double memoryUsage){ - ResInfo resInfo = new ResInfo(cpuUsage,memoryUsage); - return JSONUtils.toJson(resInfo); - } - - - public static String getHeartBeatInfo(Date now){ - return buildHeartbeatForZKInfo(OSUtils.getHost(), - OSUtils.getProcessID(), - OSUtils.cpuUsage(), - OSUtils.memoryUsage(), - DateUtils.dateToString(now), - DateUtils.dateToString(now)); - - } - - /** - * build heartbeat info for zk - * @param host - * @param port - * @param cpuUsage - * @param memoryUsage - * @param createTime - * @param lastHeartbeatTime - * @return - */ - public static String buildHeartbeatForZKInfo(String host , int port , - double cpuUsage , double memoryUsage, - String createTime,String lastHeartbeatTime){ - - return host + Constants.COMMA + port + Constants.COMMA - + cpuUsage + Constants.COMMA - + memoryUsage + Constants.COMMA - + createTime + Constants.COMMA - + lastHeartbeatTime; - } - - /** - * parse heartbeat info for zk - * @param heartBeatInfo - * @return - */ - public static MasterServer parseHeartbeatForZKInfo(String heartBeatInfo){ - MasterServer masterServer = null; - String[] masterArray = heartBeatInfo.split(Constants.COMMA); - if(masterArray.length != 6){ - return masterServer; - - } - masterServer = new MasterServer(); - masterServer.setHost(masterArray[0]); - masterServer.setPort(Integer.parseInt(masterArray[1])); - masterServer.setResInfo(getResInfoJson(Double.parseDouble(masterArray[2]), Double.parseDouble(masterArray[3]))); - masterServer.setCreateTime(DateUtils.stringToDate(masterArray[4])); - masterServer.setLastHeartbeatTime(DateUtils.stringToDate(masterArray[5])); - return masterServer; - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/SchemaUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/SchemaUtils.java deleted file mode 100644 index 18f10b1bcd..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/SchemaUtils.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Metadata related common classes - * - */ -public class SchemaUtils { - - private static final Logger logger = LoggerFactory.getLogger(SchemaUtils.class); - private static Pattern p = Pattern.compile("\\s*|\t|\r|\n"); - - /** - * 获取所有upgrade目录下的可升级的schema - * Gets upgradable schemas for all upgrade directories - * @return - */ - @SuppressWarnings("unchecked") - public static List getAllSchemaList() { - List schemaDirList = new ArrayList<>(); - File[] schemaDirArr = FileUtils.getAllDir("sql/upgrade"); - if(schemaDirArr == null || schemaDirArr.length == 0) { - return null; - } - - for(File file : schemaDirArr) { - schemaDirList.add(file.getName()); - } - - Collections.sort(schemaDirList , new Comparator() { - @Override - public int 
compare(Object o1 , Object o2){ - try { - String dir1 = String.valueOf(o1); - String dir2 = String.valueOf(o2); - String version1 = dir1.split("_")[0]; - String version2 = dir2.split("_")[0]; - if(version1.equals(version2)) { - return 0; - } - - if(SchemaUtils.isAGreatVersion(version1, version2)) { - return 1; - } - - return -1; - - } catch (Exception e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e); - } - } - }); - - return schemaDirList; - } - - /** - * 判断schemaVersion是否比version版本高 - * Determine whether schemaVersion is higher than version - * @param schemaVersion - * @param version - * @return - */ - public static boolean isAGreatVersion(String schemaVersion, String version) { - if(StringUtils.isEmpty(schemaVersion) || StringUtils.isEmpty(version)) { - throw new RuntimeException("schemaVersion or version is empty"); - } - - String[] schemaVersionArr = schemaVersion.split("\\."); - String[] versionArr = version.split("\\."); - int arrLength = schemaVersionArr.length < versionArr.length ? 
schemaVersionArr.length : versionArr.length; - for(int i = 0 ; i < arrLength ; i++) { - if(Integer.valueOf(schemaVersionArr[i]) > Integer.valueOf(versionArr[i])) { - return true; - }else if(Integer.valueOf(schemaVersionArr[i]) < Integer.valueOf(versionArr[i])) { - return false; - } - } - - // 说明直到第arrLength-1个元素,两个版本号都一样,此时谁的arrLength大,谁的版本号就大 - // If the version and schema version is the same from 0 up to the arrlength-1 element,whoever has a larger arrLength has a larger version number - return schemaVersionArr.length > versionArr.length; - } - - /** - * Gets the current software version number of the system - * @return - */ - public static String getSoftVersion() { - String soft_version; - try { - soft_version = FileUtils.readFile2Str(new FileInputStream(new File("sql/soft_version"))); - soft_version = replaceBlank(soft_version); - } catch (FileNotFoundException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException("Failed to get the product version description file. The file could not be found", e); - } catch (IOException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException("Failed to get product version number description file, failed to read the file", e); - } - return soft_version; - } - - /** - * 去掉字符串中的空格回车换行和制表符 - * Strips the string of space carriage returns and tabs - * @param str - * @return - */ - public static String replaceBlank(String str) { - String dest = ""; - if (str!=null) { - - Matcher m = p.matcher(str); - dest = m.replaceAll(""); - } - return dest; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/ScriptRunner.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/ScriptRunner.java deleted file mode 100644 index a74d7e268c..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/ScriptRunner.java +++ /dev/null @@ -1,317 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.LineNumberReader; -import java.io.Reader; -import java.sql.*; - -/* - * Slightly modified version of the com.ibatis.common.jdbc.ScriptRunner class - * from the iBATIS Apache project. Only removed dependency on Resource class - * and a constructor - */ -/* - * Copyright 2004 Clinton Begin - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/** - * Tool to run database scripts - */ -public class ScriptRunner { - - public static final Logger logger = LoggerFactory.getLogger(ScriptRunner.class); - - private static final String DEFAULT_DELIMITER = ";"; - - private Connection connection; - - private boolean stopOnError; - private boolean autoCommit; - - private String delimiter = DEFAULT_DELIMITER; - private boolean fullLineDelimiter = false; - - /** - * Default constructor - */ - public ScriptRunner(Connection connection, boolean autoCommit, boolean stopOnError) { - this.connection = connection; - this.autoCommit = autoCommit; - this.stopOnError = stopOnError; - } - - public static void main(String[] args) { - String dbName = "db_mmu"; - String appKey = dbName.substring(dbName.lastIndexOf("_")+1, dbName.length()); - System.out.println(appKey); - } - - public void setDelimiter(String delimiter, boolean fullLineDelimiter) { - this.delimiter = delimiter; - this.fullLineDelimiter = fullLineDelimiter; - } - - /** - * Runs an SQL script (read in using the Reader parameter) - * - * @param reader - * - the source of the script - */ - public void runScript(Reader reader) throws IOException, SQLException { - try { - boolean originalAutoCommit = connection.getAutoCommit(); - try { - if (originalAutoCommit != this.autoCommit) { - connection.setAutoCommit(this.autoCommit); - } - runScript(connection, reader); - } finally { - connection.setAutoCommit(originalAutoCommit); - } - } catch (IOException e) { - throw e; - } catch (SQLException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException("Error running script. 
Cause: " + e, e); - } - } - - public void runScript(Reader reader, String dbName) throws IOException, SQLException { - try { - boolean originalAutoCommit = connection.getAutoCommit(); - try { - if (originalAutoCommit != this.autoCommit) { - connection.setAutoCommit(this.autoCommit); - } - runScript(connection, reader, dbName); - } finally { - connection.setAutoCommit(originalAutoCommit); - } - } catch (IOException e) { - throw e; - } catch (SQLException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException("Error running script. Cause: " + e, e); - } - } - - /** - * Runs an SQL script (read in using the Reader parameter) using the connection - * passed in - * - * @param conn - * - the connection to use for the script - * @param reader - * - the source of the script - * @throws SQLException - * if any SQL errors occur - * @throws IOException - * if there is an error reading from the Reader - */ - private void runScript(Connection conn, Reader reader) throws IOException, SQLException { - StringBuffer command = null; - try { - LineNumberReader lineReader = new LineNumberReader(reader); - String line = null; - while ((line = lineReader.readLine()) != null) { - if (command == null) { - command = new StringBuffer(); - } - String trimmedLine = line.trim(); - if (trimmedLine.startsWith("--")) { - logger.info(trimmedLine); - } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("//")) { - // Do nothing - } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("--")) { - // Do nothing - - } else if (trimmedLine.startsWith("delimiter")) { - String newDelimiter = trimmedLine.split(" ")[1]; - this.setDelimiter(newDelimiter, fullLineDelimiter); - - } else if (!fullLineDelimiter && trimmedLine.endsWith(getDelimiter()) - || fullLineDelimiter && trimmedLine.equals(getDelimiter())) { - command.append(line.substring(0, line.lastIndexOf(getDelimiter()))); - command.append(" "); - Statement statement = conn.createStatement(); - - // 
logger.info(command.toString()); - - boolean hasResults = false; - logger.info("sql:"+command.toString()); - if (stopOnError) { - hasResults = statement.execute(command.toString()); - } else { - try { - statement.execute(command.toString()); - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw e; - } - } - - ResultSet rs = statement.getResultSet(); - if (hasResults && rs != null) { - ResultSetMetaData md = rs.getMetaData(); - int cols = md.getColumnCount(); - for (int i = 0; i < cols; i++) { - String name = md.getColumnLabel(i); - logger.info(name + "\t"); - } - logger.info(""); - while (rs.next()) { - for (int i = 0; i < cols; i++) { - String value = rs.getString(i); - logger.info(value + "\t"); - } - logger.info(""); - } - } - - command = null; - try { - statement.close(); - } catch (Exception e) { - // Ignore to workaround a bug in Jakarta DBCP - } - Thread.yield(); - } else { - command.append(line); - command.append(" "); - } - } - - } catch (SQLException e) { - logger.error("Error executing: " + command.toString()); - throw e; - } catch (IOException e) { - e.fillInStackTrace(); - logger.error("Error executing: " + command.toString()); - throw e; - } - } - - private void runScript(Connection conn, Reader reader , String dbName) throws IOException, SQLException { - StringBuffer command = null; - String sql = ""; - String appKey = dbName.substring(dbName.lastIndexOf("_")+1, dbName.length()); - try { - LineNumberReader lineReader = new LineNumberReader(reader); - String line = null; - while ((line = lineReader.readLine()) != null) { - if (command == null) { - command = new StringBuffer(); - } - String trimmedLine = line.trim(); - if (trimmedLine.startsWith("--")) { - logger.info(trimmedLine); - } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("//")) { - // Do nothing - } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("--")) { - // Do nothing - - } else if (trimmedLine.startsWith("delimiter")) { - String newDelimiter = 
trimmedLine.split(" ")[1]; - this.setDelimiter(newDelimiter, fullLineDelimiter); - - } else if (!fullLineDelimiter && trimmedLine.endsWith(getDelimiter()) - || fullLineDelimiter && trimmedLine.equals(getDelimiter())) { - command.append(line.substring(0, line.lastIndexOf(getDelimiter()))); - command.append(" "); - Statement statement = conn.createStatement(); - - // logger.info(command.toString()); - - sql = command.toString().replaceAll("\\{\\{APPDB\\}\\}", dbName); - boolean hasResults = false; - logger.info("sql:"+sql); - if (stopOnError) { - hasResults = statement.execute(sql); - } else { - try { - statement.execute(sql); - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw e; - } - } - - ResultSet rs = statement.getResultSet(); - if (hasResults && rs != null) { - ResultSetMetaData md = rs.getMetaData(); - int cols = md.getColumnCount(); - for (int i = 0; i < cols; i++) { - String name = md.getColumnLabel(i); - logger.info(name + "\t"); - } - logger.info(""); - while (rs.next()) { - for (int i = 0; i < cols; i++) { - String value = rs.getString(i); - logger.info(value + "\t"); - } - logger.info(""); - } - } - - command = null; - try { - statement.close(); - } catch (Exception e) { - // Ignore to workaround a bug in Jakarta DBCP - } - Thread.yield(); - } else { - command.append(line); - command.append(" "); - } - } - - } catch (SQLException e) { - logger.error("Error executing: " + sql); - throw e; - } catch (IOException e) { - e.fillInStackTrace(); - logger.error("Error executing: " + sql); - throw e; - } - } - - private String getDelimiter() { - return delimiter; - } - -} \ No newline at end of file diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/TaskParametersUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/TaskParametersUtils.java deleted file mode 100644 index b3fcade4a7..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/TaskParametersUtils.java +++ /dev/null @@ -1,80 
+0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.enums.TaskType; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.dependent.DependentParameters; -import cn.escheduler.common.task.flink.FlinkParameters; -import cn.escheduler.common.task.http.HttpParameters; -import cn.escheduler.common.task.mr.MapreduceParameters; -import cn.escheduler.common.task.procedure.ProcedureParameters; -import cn.escheduler.common.task.python.PythonParameters; -import cn.escheduler.common.task.shell.ShellParameters; -import cn.escheduler.common.task.spark.SparkParameters; -import cn.escheduler.common.task.sql.SqlParameters; -import cn.escheduler.common.task.subprocess.SubProcessParameters; -import org.apache.commons.lang3.EnumUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - -/** - * task parameters utils - */ -public class TaskParametersUtils { - - private static Logger logger = LoggerFactory.getLogger(TaskParametersUtils.class); - - /** - * get task parameters - * @param taskType - * @param parameter - * @return task parameters - */ - public static AbstractParameters getParameters(String taskType, String 
parameter) { - try { - switch (EnumUtils.getEnum(TaskType.class,taskType)) { - case SUB_PROCESS: - return JSONUtils.parseObject(parameter, SubProcessParameters.class); - case SHELL: - return JSONUtils.parseObject(parameter, ShellParameters.class); - case PROCEDURE: - return JSONUtils.parseObject(parameter, ProcedureParameters.class); - case SQL: - return JSONUtils.parseObject(parameter, SqlParameters.class); - case MR: - return JSONUtils.parseObject(parameter, MapreduceParameters.class); - case SPARK: - return JSONUtils.parseObject(parameter, SparkParameters.class); - case PYTHON: - return JSONUtils.parseObject(parameter, PythonParameters.class); - case DEPENDENT: - return JSONUtils.parseObject(parameter, DependentParameters.class); - case FLINK: - return JSONUtils.parseObject(parameter, FlinkParameters.class); - case HTTP: - return JSONUtils.parseObject(parameter, HttpParameters.class); - default: - return null; - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - return null; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/dependent/DependentDateUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/dependent/DependentDateUtils.java deleted file mode 100644 index 0127fe2ca9..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/dependent/DependentDateUtils.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils.dependent; - -import cn.escheduler.common.model.DateInterval; -import cn.escheduler.common.utils.DateUtils; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -public class DependentDateUtils { - - /** - * get last day interval list - * @param businessDate - * @param hourNumber - * @return - */ - public static List getLastHoursInterval(Date businessDate, int hourNumber){ - List dateIntervals = new ArrayList<>(); - for(int index = hourNumber; index > 0; index--){ - Date lastHour = DateUtils.getSomeHourOfDay(businessDate, index); - Date beginTime = DateUtils.getStartOfHour(lastHour); - Date endTime = DateUtils.getEndOfHour(lastHour); - dateIntervals.add(new DateInterval(beginTime, endTime)); - } - return dateIntervals; - } - - /** - * get today day interval list - * @param businessDate - * @return - */ - public static List getTodayInterval(Date businessDate){ - - List dateIntervals = new ArrayList<>(); - - Date beginTime = DateUtils.getStartOfDay(businessDate); - Date endTime = DateUtils.getEndOfDay(businessDate); - dateIntervals.add(new DateInterval(beginTime, endTime)); - return dateIntervals; - } - - /** - * get last day interval list - * @param businessDate - * @param someDay - * @return - */ - public static List getLastDayInterval(Date businessDate, int someDay){ - - List dateIntervals = new ArrayList<>(); - for(int index = someDay; index > 0; index--){ - Date lastDay = DateUtils.getSomeDay(businessDate, -index); - - Date beginTime = DateUtils.getStartOfDay(lastDay); - Date 
endTime = DateUtils.getEndOfDay(lastDay); - dateIntervals.add(new DateInterval(beginTime, endTime)); - } - return dateIntervals; - } - - /** - * get interval between this month first day and businessDate - * @param businessDate - * @return - */ - public static List getThisMonthInterval(Date businessDate) { - Date firstDay = DateUtils.getFirstDayOfMonth(businessDate); - return getDateIntervalListBetweenTwoDates(firstDay, businessDate); - } - - /** - * get interval between last month first day and last day - * @param businessDate - * @return - */ - public static List getLastMonthInterval(Date businessDate) { - - Date firstDayThisMonth = DateUtils.getFirstDayOfMonth(businessDate); - Date lastDay = DateUtils.getSomeDay(firstDayThisMonth, -1); - Date firstDay = DateUtils.getFirstDayOfMonth(lastDay); - return getDateIntervalListBetweenTwoDates( firstDay, lastDay); - } - - - /** - * get interval on first/last day of the last month - * @param businessDate - * @param isBeginDay - * @return - */ - public static List getLastMonthBeginInterval(Date businessDate, boolean isBeginDay) { - - Date firstDayThisMonth = DateUtils.getFirstDayOfMonth(businessDate); - Date lastDay = DateUtils.getSomeDay(firstDayThisMonth, -1); - Date firstDay = DateUtils.getFirstDayOfMonth(lastDay); - if(isBeginDay){ - return getDateIntervalListBetweenTwoDates(firstDay, firstDay); - }else{ - return getDateIntervalListBetweenTwoDates(lastDay, lastDay); - } - } - - /** - * get interval between monday to businessDate of this week - * @param businessDate - * @return - */ - public static List getThisWeekInterval(Date businessDate) { - Date mondayThisWeek = DateUtils.getMonday(businessDate); - return getDateIntervalListBetweenTwoDates(mondayThisWeek, businessDate); - } - - /** - * get interval between monday to sunday of last week - * default set monday the first day of week - * @param businessDate - * @return - */ - public static List getLastWeekInterval(Date businessDate) { - Date mondayThisWeek = 
DateUtils.getMonday(businessDate); - Date sunday = DateUtils.getSomeDay(mondayThisWeek, -1); - Date monday = DateUtils.getMonday(sunday); - return getDateIntervalListBetweenTwoDates(monday, sunday); - } - - /** - * get interval on the day of last week - * default set monday the first day of week - * @param businessDate - * @param dayOfWeek monday:1,tuesday:2,wednesday:3,thursday:4,friday:5,saturday:6,sunday:7 - * @return - */ - public static List getLastWeekOneDayInterval(Date businessDate, int dayOfWeek) { - Date mondayThisWeek = DateUtils.getMonday(businessDate); - Date sunday = DateUtils.getSomeDay(mondayThisWeek, -1); - Date monday = DateUtils.getMonday(sunday); - Date destDay = DateUtils.getSomeDay(monday, dayOfWeek -1); - return getDateIntervalListBetweenTwoDates(destDay, destDay); - } - - public static List getDateIntervalListBetweenTwoDates(Date firstDay, Date lastDay) { - List dateIntervals = new ArrayList<>(); - while(!firstDay.after(lastDay)){ - Date beginTime = DateUtils.getStartOfDay(firstDay); - Date endTime = DateUtils.getEndOfDay(firstDay); - dateIntervals.add(new DateInterval(beginTime, endTime)); - firstDay = DateUtils.getSomeDay(firstDay, 1); - } - return dateIntervals; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/BusinessTimeUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/BusinessTimeUtils.java deleted file mode 100644 index c6c35d3d70..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/BusinessTimeUtils.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils.placeholder; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.CommandType; - -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - -import static cn.escheduler.common.Constants.PARAMETER_FORMAT_DATE; -import static cn.escheduler.common.Constants.PARAMETER_FORMAT_TIME; -import static cn.escheduler.common.utils.DateUtils.format; -import static org.apache.commons.lang.time.DateUtils.addDays; - - -/** - * business time utils - */ -public class BusinessTimeUtils { - - /** - * get business time in parameters by different command types - * - * @param commandType command type - * @param runTime run time or schedule time - */ - public static Map getBusinessTime(CommandType commandType, Date runTime) { - Date businessDate = runTime; - switch (commandType) { - case COMPLEMENT_DATA: - break; - case START_PROCESS: - case START_CURRENT_TASK_PROCESS: - case RECOVER_TOLERANCE_FAULT_PROCESS: - case RECOVER_SUSPENDED_PROCESS: - case START_FAILURE_TASK_PROCESS: - case REPEAT_RUNNING: - case SCHEDULER: - default: - businessDate = addDays(new Date(), -1); - if (runTime != null){ - /** - * If there is a scheduled time, take the scheduling time. 
Recovery from failed nodes, suspension of recovery, re-run for scheduling - */ - businessDate = addDays(runTime, -1); - } - break; - } - Date businessCurrentDate = addDays(businessDate, 1); - Map result = new HashMap<>(); - result.put(Constants.PARAMETER_CURRENT_DATE, format(businessCurrentDate, PARAMETER_FORMAT_DATE)); - result.put(Constants.PARAMETER_BUSINESS_DATE, format(businessDate, PARAMETER_FORMAT_DATE)); - result.put(Constants.PARAMETER_DATETIME, format(businessCurrentDate, PARAMETER_FORMAT_TIME)); - return result; - } -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/PlaceholderUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/PlaceholderUtils.java deleted file mode 100644 index 7cdd0121bc..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/PlaceholderUtils.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.utils.placeholder; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Map; - -/** - * placeholder utils - */ -public class PlaceholderUtils { - - private static final Logger logger = LoggerFactory.getLogger(PlaceholderUtils.class); - - /** - * Prefix of the position to be replaced - */ - public static final String placeholderPrefix = "${"; - - /** - * The suffix of the position to be replaced - */ - public static final String placeholderSuffix = "}"; - - - /** - * Replaces all placeholders of format {@code ${name}} with the value returned - * from the supplied {@link PropertyPlaceholderHelper.PlaceholderResolver}. - * - * @param value the value containing the placeholders to be replaced - * @param paramsMap placeholder data dictionary - * @return the supplied value with placeholders replaced inline - */ - public static String replacePlaceholders(String value, Map paramsMap, boolean ignoreUnresolvablePlaceholders) { - //replacement tool, parameter key will be replaced by value,if can't match , will throw an exception - PropertyPlaceholderHelper strictHelper = getPropertyPlaceholderHelper(false); - - //Non-strict replacement tool implementation, when the position to be replaced does not get the corresponding value, the current position is ignored, and the next position is replaced. - PropertyPlaceholderHelper nonStrictHelper = getPropertyPlaceholderHelper(true); - - PropertyPlaceholderHelper helper = (ignoreUnresolvablePlaceholders ? nonStrictHelper : strictHelper); - - //the PlaceholderResolver to use for replacement - return helper.replacePlaceholders(value, new PropertyPlaceholderResolver(value, paramsMap)); - } - - /** - * Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix. 
- * @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should - * be ignored ({@code true}) or cause an exception ({@code false}) - */ - public static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) { - - return new PropertyPlaceholderHelper(placeholderPrefix, placeholderSuffix, null, ignoreUnresolvablePlaceholders); - } - - /** - * Placeholder replacement resolver - */ - private static class PropertyPlaceholderResolver implements PropertyPlaceholderHelper.PlaceholderResolver { - - private final String value; - - private final Map paramsMap; - - public PropertyPlaceholderResolver(String value, Map paramsMap) { - this.value = value; - this.paramsMap = paramsMap; - } - - @Override - public String resolvePlaceholder(String placeholderName) { - try { - return paramsMap.get(placeholderName); - } catch (Exception ex) { - logger.error(String.format("resolve placeholder '%s' in [ %s ]" , placeholderName, value), ex); - return null; - } - } - } - - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/PropertyPlaceholderHelper.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/PropertyPlaceholderHelper.java deleted file mode 100644 index b1c337b6f7..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/PropertyPlaceholderHelper.java +++ /dev/null @@ -1,254 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils.placeholder; - - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import java.util.*; - -/** - * Utility class for working with Strings that have placeholder values in them. A placeholder takes the form - * {@code ${name}}. Using {@code PropertyPlaceholderHelper} these placeholders can be substituted for - * user-supplied values.

Values for substitution can be supplied using a {@link Properties} instance or - * using a {@link PlaceholderResolver}. - * - * @author Juergen Hoeller - * @author Rob Harrop - * @since 3.0 - */ -public class PropertyPlaceholderHelper { - - private static final Log logger = LogFactory.getLog(PropertyPlaceholderHelper.class); - - private static final Map wellKnownSimplePrefixes = new HashMap(4); - - static { - wellKnownSimplePrefixes.put("}", "{"); - wellKnownSimplePrefixes.put("]", "["); - wellKnownSimplePrefixes.put(")", "("); - } - - - private final String placeholderPrefix; - - private final String placeholderSuffix; - - private final String simplePrefix; - - private final String valueSeparator; - - private final boolean ignoreUnresolvablePlaceholders; - - - /** - * Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix. - * Unresolvable placeholders are ignored. - * @param placeholderPrefix the prefix that denotes the start of a placeholder - * @param placeholderSuffix the suffix that denotes the end of a placeholder - */ - public PropertyPlaceholderHelper(String placeholderPrefix, String placeholderSuffix) { - this(placeholderPrefix, placeholderSuffix, null, true); - } - - /** - * Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix. 
- * @param placeholderPrefix the prefix that denotes the start of a placeholder - * @param placeholderSuffix the suffix that denotes the end of a placeholder - * @param valueSeparator the separating character between the placeholder variable - * and the associated default value, if any - * @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should - * be ignored ({@code true}) or cause an exception ({@code false}) - */ - public PropertyPlaceholderHelper(String placeholderPrefix, String placeholderSuffix, - String valueSeparator, boolean ignoreUnresolvablePlaceholders) { - - notNull(placeholderPrefix, "'placeholderPrefix' must not be null"); - notNull(placeholderSuffix, "'placeholderSuffix' must not be null"); - this.placeholderPrefix = placeholderPrefix; - this.placeholderSuffix = placeholderSuffix; - String simplePrefixForSuffix = wellKnownSimplePrefixes.get(this.placeholderSuffix); - if (simplePrefixForSuffix != null && this.placeholderPrefix.endsWith(simplePrefixForSuffix)) { - this.simplePrefix = simplePrefixForSuffix; - } - else { - this.simplePrefix = this.placeholderPrefix; - } - this.valueSeparator = valueSeparator; - this.ignoreUnresolvablePlaceholders = ignoreUnresolvablePlaceholders; - } - - - /** - * Replaces all placeholders of format {@code ${name}} with the corresponding - * property from the supplied {@link Properties}. 
- * @param value the value containing the placeholders to be replaced - * @param properties the {@code Properties} to use for replacement - * @return the supplied value with placeholders replaced inline - */ - public String replacePlaceholders(String value, final Properties properties) { - notNull(properties, "'properties' must not be null"); - return replacePlaceholders(value, new PlaceholderResolver() { - @Override - public String resolvePlaceholder(String placeholderName) { - return properties.getProperty(placeholderName); - } - }); - } - - /** - * Replaces all placeholders of format {@code ${name}} with the value returned - * from the supplied {@link PlaceholderResolver}. - * @param value the value containing the placeholders to be replaced - * @param placeholderResolver the {@code PlaceholderResolver} to use for replacement - * @return the supplied value with placeholders replaced inline - */ - public String replacePlaceholders(String value, PlaceholderResolver placeholderResolver) { - notNull(value, "'value' must not be null"); - return parseStringValue(value, placeholderResolver, new HashSet()); - } - - protected String parseStringValue( - String value, PlaceholderResolver placeholderResolver, Set visitedPlaceholders) { - - StringBuilder result = new StringBuilder(value); - - int startIndex = value.indexOf(this.placeholderPrefix); - while (startIndex != -1) { - int endIndex = findPlaceholderEndIndex(result, startIndex); - if (endIndex != -1) { - String placeholder = result.substring(startIndex + this.placeholderPrefix.length(), endIndex); - String originalPlaceholder = placeholder; - if (!visitedPlaceholders.add(originalPlaceholder)) { - throw new IllegalArgumentException( - "Circular placeholder reference '" + originalPlaceholder + "' in property definitions"); - } - // Recursive invocation, parsing placeholders contained in the placeholder key. 
- placeholder = parseStringValue(placeholder, placeholderResolver, visitedPlaceholders); - // Now obtain the value for the fully resolved key... - String propVal = placeholderResolver.resolvePlaceholder(placeholder); - if (propVal == null && this.valueSeparator != null) { - int separatorIndex = placeholder.indexOf(this.valueSeparator); - if (separatorIndex != -1) { - String actualPlaceholder = placeholder.substring(0, separatorIndex); - String defaultValue = placeholder.substring(separatorIndex + this.valueSeparator.length()); - propVal = placeholderResolver.resolvePlaceholder(actualPlaceholder); - if (propVal == null) { - propVal = defaultValue; - } - } - } - if (propVal != null) { - // Recursive invocation, parsing placeholders contained in the - // previously resolved placeholder value. - propVal = parseStringValue(propVal, placeholderResolver, visitedPlaceholders); - result.replace(startIndex, endIndex + this.placeholderSuffix.length(), propVal); - if (logger.isTraceEnabled()) { - logger.trace("Resolved placeholder '" + placeholder + "'"); - } - startIndex = result.indexOf(this.placeholderPrefix, startIndex + propVal.length()); - } - else if (this.ignoreUnresolvablePlaceholders) { - // Proceed with unprocessed value. 
- startIndex = result.indexOf(this.placeholderPrefix, endIndex + this.placeholderSuffix.length()); - } - else { - throw new IllegalArgumentException("Could not resolve placeholder '" + - placeholder + "'" + " in value \"" + value + "\""); - } - visitedPlaceholders.remove(originalPlaceholder); - } - else { - startIndex = -1; - } - } - - return result.toString(); - } - - private int findPlaceholderEndIndex(CharSequence buf, int startIndex) { - int index = startIndex + this.placeholderPrefix.length(); - int withinNestedPlaceholder = 0; - while (index < buf.length()) { - if (substringMatch(buf, index, this.placeholderSuffix)) { - if (withinNestedPlaceholder > 0) { - withinNestedPlaceholder--; - index = index + this.placeholderSuffix.length(); - } - else { - return index; - } - } - else if (substringMatch(buf, index, this.simplePrefix)) { - withinNestedPlaceholder++; - index = index + this.simplePrefix.length(); - } - else { - index++; - } - } - return -1; - } - - - /** - * Strategy interface used to resolve replacement values for placeholders contained in Strings. - */ - public interface PlaceholderResolver { - - /** - * Resolve the supplied placeholder name to the replacement value. - * @param placeholderName the name of the placeholder to resolve - * @return the replacement value, or {@code null} if no replacement is to be made - */ - String resolvePlaceholder(String placeholderName); - } - - /** - * Test whether the given string matches the given substring - * at the given index. 
- * @param str the original string (or StringBuilder) - * @param index the index in the original string to start matching against - * @param substring the substring to match at the given index - */ - public static boolean substringMatch(CharSequence str, int index, CharSequence substring) { - for (int j = 0; j < substring.length(); j++) { - int i = index + j; - if (i >= str.length() || str.charAt(i) != substring.charAt(j)) { - return false; - } - } - return true; - } - - /** - * Assert that an object is not {@code null}. - *

Assert.notNull(clazz, "The class must not be null");
- * @param object the object to check - * @param message the exception message to use if the assertion fails - * @throws IllegalArgumentException if the object is {@code null} - */ - public static void notNull(Object object, String message) { - if (object == null) { - throw new IllegalArgumentException(message); - } - } - - -} - diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/TimePlaceholderUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/TimePlaceholderUtils.java deleted file mode 100644 index c57613b648..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/TimePlaceholderUtils.java +++ /dev/null @@ -1,512 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.utils.placeholder; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.utils.DateUtils; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; - -import static cn.escheduler.common.Constants.*; - -/** - * time place holder utils - */ -public class TimePlaceholderUtils { - private static final Logger logger = LoggerFactory.getLogger(TimePlaceholderUtils.class); - - /** - * Prefix of the position to be replaced - */ - public static final String placeholderPrefix = "$["; - - /** - * The suffix of the position to be replaced - */ - public static final String placeholderSuffix = "]"; - - /** - * Replaces all placeholders of format {@code ${name}} with the value returned - * from the supplied {@link PropertyPlaceholderHelper.PlaceholderResolver}. - * - * @param value the value containing the placeholders to be replaced - * @param date custom date - * @param ignoreUnresolvablePlaceholders - * @return the supplied value with placeholders replaced inline - */ - public static String replacePlaceholders(String value, Date date, boolean ignoreUnresolvablePlaceholders) { - PropertyPlaceholderHelper strictHelper = getPropertyPlaceholderHelper(false); - PropertyPlaceholderHelper nonStrictHelper = getPropertyPlaceholderHelper(true); - - PropertyPlaceholderHelper helper = (ignoreUnresolvablePlaceholders ? nonStrictHelper : strictHelper); - return helper.replacePlaceholders(value, new TimePlaceholderResolver(value, date)); - } - - - /** - * Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix. 
- * @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should - * be ignored ({@code true}) or cause an exception ({@code false}) - */ - private static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) { - return new PropertyPlaceholderHelper(placeholderPrefix, placeholderSuffix, null, ignoreUnresolvablePlaceholders); - } - - /** - * calculate expression's value - * - * @param expression - * @return - */ - public static Integer calculate(String expression) { - expression = StringUtils.trim(expression); - expression = convert(expression); - - List result = string2List(expression); - result = convert2SuffixList(result); - - return calculate(result); - } - - /** - * Change the sign in the expression to P (positive) N (negative) - * - * @param expression - * @return eg. "-3+-6*(+8)-(-5) -> S3+S6*(P8)-(S5)" - */ - private static String convert(String expression) { - char[] arr = expression.toCharArray(); - - for (int i = 0; i < arr.length; i++) { - if (arr[i] == SUBTRACT_CHAR) { - if (i == 0) { - arr[i] = N; - } else { - char c = arr[i - 1]; - if (c == ADD_CHAR || c == SUBTRACT_CHAR || c == MULTIPLY_CHAR || c == DIVISION_CHAR || c == LEFT_BRACE_CHAR) { - arr[i] = N; - } - } - } else if (arr[i] == ADD_CHAR) { - if (i == 0) { - arr[i] = P; - } else { - char c = arr[i - 1]; - if (c == ADD_CHAR || c == SUBTRACT_CHAR || c == MULTIPLY_CHAR || c == DIVISION_CHAR || c == LEFT_BRACE_CHAR) { - arr[i] = P; - } - } - } - } - - return new String(arr); - } - - /** - * to suffix expression - * - * @param srcList - * @return - */ - private static List convert2SuffixList(List srcList) { - List result = new ArrayList<>(); - Stack stack = new Stack<>(); - - for (int i = 0; i < srcList.size(); i++) { - if (Character.isDigit(srcList.get(i).charAt(0))) { - result.add(srcList.get(i)); - } else { - switch (srcList.get(i).charAt(0)) { - case LEFT_BRACE_CHAR: - stack.push(srcList.get(i)); - break; - case 
RIGHT_BRACE_CHAR: - while (!LEFT_BRACE_STRING.equals(stack.peek())) { - result.add(stack.pop()); - } - stack.pop(); - break; - default: - while (!stack.isEmpty() && compare(stack.peek(), srcList.get(i))) { - result.add(stack.pop()); - } - stack.push(srcList.get(i)); - break; - } - } - } - - while (!stack.isEmpty()) { - result.add(stack.pop()); - } - - return result; - } - - /** - * Calculate the suffix expression - * - * @param result - * @return - */ - private static Integer calculate(List result) { - Stack stack = new Stack<>(); - for (int i = 0; i < result.size(); i++) { - if (Character.isDigit(result.get(i).charAt(0))) { - stack.push(Integer.parseInt(result.get(i))); - } else { - Integer backInt = stack.pop(); - Integer frontInt = 0; - char op = result.get(i).charAt(0); - - if (!(op == P || op == N)) { - frontInt = stack.pop(); - } - - Integer res = 0; - switch (result.get(i).charAt(0)) { - case P: - res = frontInt + backInt; - break; - case N: - res = frontInt - backInt; - break; - case ADD_CHAR: - res = frontInt + backInt; - break; - case SUBTRACT_CHAR: - res = frontInt - backInt; - break; - case MULTIPLY_CHAR: - res = frontInt * backInt; - break; - case DIVISION_CHAR: - res = frontInt / backInt; - break; - default: - break; - } - stack.push(res); - } - } - - return stack.pop(); - } - - /** - * string to list - * - * @param expression - * @return list - */ - private static List string2List(String expression) { - List result = new ArrayList<>(); - String num = ""; - for (int i = 0; i < expression.length(); i++) { - if (Character.isDigit(expression.charAt(i))) { - num = num + expression.charAt(i); - } else { - if (!num.isEmpty()) { - result.add(num); - } - result.add(expression.charAt(i) + ""); - num = ""; - } - } - - if (!num.isEmpty()) { - result.add(num); - } - - return result; - } - - /** - * compare loginUser level - * - * @param peek - * @param cur - * @return true or false - */ - private static boolean compare(String peek, String cur) { - if 
(MULTIPLY_STRING.equals(peek) && (DIVISION_STRING.equals(cur) || MULTIPLY_STRING.equals(cur) || ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) { - return true; - } else if (DIVISION_STRING.equals(peek) && (DIVISION_STRING.equals(cur) || MULTIPLY_STRING.equals(cur) || ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) { - return true; - } else if (ADD_STRING.equals(peek) && (ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) { - return true; - } else { - return SUBTRACT_STRING.equals(peek) && (ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur)); - } - - } - - /** - * Placeholder replacement resolver - */ - private static class TimePlaceholderResolver implements - PropertyPlaceholderHelper.PlaceholderResolver { - - private final String value; - - private final Date date; - - public TimePlaceholderResolver(String value, Date date) { - this.value = value; - this.date = date; - } - - @Override - public String resolvePlaceholder(String placeholderName) { - try { - return calculateTime(placeholderName, date); - } catch (Exception ex) { - logger.error(String.format("resolve placeholder '%s' in [ %s ]" , placeholderName, value), ex); - return null; - } - } - } - - - /** - * calculate time - * - * @param date date - * @return calculate time - */ - private static String calculateTime(String expression, Date date) { - // After N years: $[add_months(yyyyMMdd,12*N)], the first N months: $[add_months(yyyyMMdd,-N)], etc - String value; - - try { - if (expression.startsWith(Constants.TIMESTAMP)) { - String timeExpression = expression.substring(Constants.TIMESTAMP.length() + 1, expression.length() - 1); - - Map.Entry entry = calcTimeExpression(timeExpression, date); - - String dateStr = DateUtils.format(entry.getKey(), entry.getValue()); - - Date timestamp = DateUtils.parse(dateStr, Constants.PARAMETER_FORMAT_TIME); - - value = String.valueOf(timestamp.getTime() / 1000); - } else { - Map.Entry entry = calcTimeExpression(expression, date); - value = 
DateUtils.format(entry.getKey(), entry.getValue()); - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - throw e; - } - - return value; - } - - /** - * calculate time expresstion - * - * @return - */ - public static Map.Entry calcTimeExpression(String expression, Date date) { - Map.Entry resultEntry; - - if (expression.startsWith(Constants.ADD_MONTHS)) { - resultEntry = calcMonths(expression, date); - } else if (expression.startsWith(Constants.MONTH_BEGIN)) { - resultEntry = calcMonthBegin(expression, date); - } else if (expression.startsWith(Constants.MONTH_END)) { - resultEntry = calcMonthEnd(expression, date); - } else if (expression.startsWith(Constants.WEEK_BEGIN)) { - resultEntry = calcWeekStart(expression, date); - } else if (expression.startsWith(Constants.WEEK_END)) { - resultEntry = calcWeekEnd(expression, date); - } else { - resultEntry = calcMinutes(expression, date); - } - - return resultEntry; - } - - /** - * get first day of month - * - * @return - */ - public static Map.Entry calcMonthBegin(String expression, Date date) { - String addMonthExpr = expression.substring(Constants.MONTH_BEGIN.length() + 1, expression.length() - 1); - String[] params = addMonthExpr.split(Constants.COMMA); - - if (params.length == 2) { - String dateFormat = params[0]; - String dayExpr = params[1]; - Integer day = calculate(dayExpr); - Date targetDate = DateUtils.getFirstDayOfMonth(date); - targetDate = org.apache.commons.lang.time.DateUtils.addDays(targetDate, day); - - return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); - } - - throw new RuntimeException("expression not valid"); - } - - /** - * get last day of month - * - */ - public static Map.Entry calcMonthEnd(String expression, Date date) { - String addMonthExpr = expression.substring(Constants.MONTH_END.length() + 1, expression.length() - 1); - String[] params = addMonthExpr.split(Constants.COMMA); - - if (params.length == 2) { - String dateFormat = params[0]; - String dayExpr = 
params[1]; - Integer day = calculate(dayExpr); - Date targetDate = DateUtils.getLastDayOfMonth(date); - targetDate = org.apache.commons.lang.time.DateUtils.addDays(targetDate, day); - - return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); - } - - throw new RuntimeException("expression not valid"); - } - - /** - * get first day of week - * - * @return monday - */ - public static Map.Entry calcWeekStart(String expression, Date date) { - String addMonthExpr = expression.substring(Constants.WEEK_BEGIN.length() + 1, expression.length() - 1); - String[] params = addMonthExpr.split(Constants.COMMA); - - if (params.length == 2) { - String dateFormat = params[0]; - String dayExpr = params[1]; - Integer day = calculate(dayExpr); - Date targetDate = DateUtils.getMonday(date); - targetDate = org.apache.commons.lang.time.DateUtils.addDays(targetDate, day); - return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); - } - - throw new RuntimeException("expression not valid"); - } - - /** - * get last day of week - * - */ - public static Map.Entry calcWeekEnd(String expression, Date date) { - String addMonthExpr = expression.substring(Constants.WEEK_END.length() + 1, expression.length() - 1); - String[] params = addMonthExpr.split(Constants.COMMA); - - if (params.length == 2) { - String dateFormat = params[0]; - String dayExpr = params[1]; - Integer day = calculate(dayExpr); - Date targetDate = DateUtils.getSunday(date); - targetDate = org.apache.commons.lang.time.DateUtils.addDays(targetDate, day); - - return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); - } - - throw new RuntimeException("Expression not valid"); - } - - /** - * calc months expression - * - * @return - */ - public static Map.Entry calcMonths(String expression, Date date) { - String addMonthExpr = expression.substring(Constants.ADD_MONTHS.length() + 1, expression.length() - 1); - String[] params = addMonthExpr.split(Constants.COMMA); - - if (params.length == 2) { - 
String dateFormat = params[0]; - String monthExpr = params[1]; - Integer addMonth = calculate(monthExpr); - Date targetDate = org.apache.commons.lang.time.DateUtils.addMonths(date, addMonth); - - return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); - } - - throw new RuntimeException("expression not valid"); - } - - /** - * calculate time expression - * - * - * @return - */ - public static Map.Entry calcMinutes(String expression, Date date) { - if (expression.contains("+")) { - int index = expression.lastIndexOf('+'); - - if (Character.isDigit(expression.charAt(index + 1))) { - String addMinuteExpr = expression.substring(index + 1); - Date targetDate = org.apache.commons.lang.time.DateUtils - .addMinutes(date, calcMinutes(addMinuteExpr)); - String dateFormat = expression.substring(0, index); - - return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); - } - } else if (expression.contains("-")) { - int index = expression.lastIndexOf('-'); - - if (Character.isDigit(expression.charAt(index + 1))) { - String addMinuteExpr = expression.substring(index + 1); - Date targetDate = org.apache.commons.lang.time.DateUtils - .addMinutes(date, 0 - calcMinutes(addMinuteExpr)); - String dateFormat = expression.substring(0, index); - - return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); - } - - // yyyy-MM-dd/HH:mm:ss - return new AbstractMap.SimpleImmutableEntry<>(date, expression); - } - - // $[HHmmss] - return new AbstractMap.SimpleImmutableEntry<>(date, expression); - } - - /** - * calculate need minutes - * - * @param minuteExpression - * @return - */ - public static Integer calcMinutes(String minuteExpression) { - int index = minuteExpression.indexOf("/"); - - String calcExpression; - - if (index == -1) { - calcExpression = String.format("60*24*(%s)", minuteExpression); - } else { - - calcExpression = String.format("60*24*(%s)%s", minuteExpression.substring(0, index), - minuteExpression.substring(index)); - } - - return 
calculate(calcExpression); - } - -} diff --git a/escheduler-common/src/main/java/cn/escheduler/common/zk/AbstractZKClient.java b/escheduler-common/src/main/java/cn/escheduler/common/zk/AbstractZKClient.java deleted file mode 100644 index 7a1d63cdff..0000000000 --- a/escheduler-common/src/main/java/cn/escheduler/common/zk/AbstractZKClient.java +++ /dev/null @@ -1,583 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.zk; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.IStoppable; -import cn.escheduler.common.enums.ZKNodeType; -import cn.escheduler.common.model.MasterServer; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.common.utils.OSUtils; -import cn.escheduler.common.utils.ResInfo; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.configuration.ConfigurationException; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.apache.commons.lang3.StringUtils; -import org.apache.curator.RetryPolicy; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.CuratorFrameworkFactory; -import org.apache.curator.framework.imps.CuratorFrameworkState; -import org.apache.curator.framework.recipes.locks.InterProcessMutex; -import org.apache.curator.framework.state.ConnectionState; -import org.apache.curator.framework.state.ConnectionStateListener; -import org.apache.curator.retry.ExponentialBackoffRetry; -import org.apache.zookeeper.CreateMode; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; - -import static cn.escheduler.common.Constants.*; - - -/** - * abstract zookeeper client - */ -public abstract class AbstractZKClient { - - private static final Logger logger = LoggerFactory.getLogger(AbstractZKClient.class); - - /** - * load configuration file - */ - protected static Configuration conf; - - protected CuratorFramework zkClient = null; - - /** - * server stop or not - */ - protected IStoppable stoppable = null; - - - static { - try { - conf = new PropertiesConfiguration(Constants.ZOOKEEPER_PROPERTIES_PATH); - }catch (ConfigurationException e){ - logger.error("load configuration failed : " + e.getMessage(),e); - System.exit(1); - } - } - - - public AbstractZKClient() { - - // retry strategy - RetryPolicy retryPolicy = new ExponentialBackoffRetry( - 
Integer.parseInt(conf.getString(Constants.ZOOKEEPER_RETRY_SLEEP)), - Integer.parseInt(conf.getString(Constants.ZOOKEEPER_RETRY_MAXTIME))); - - try{ - // crate zookeeper client - zkClient = CuratorFrameworkFactory.builder() - .connectString(getZookeeperQuorum()) - .retryPolicy(retryPolicy) - .sessionTimeoutMs(1000 * Integer.parseInt(conf.getString(Constants.ZOOKEEPER_SESSION_TIMEOUT))) - .connectionTimeoutMs(1000 * Integer.parseInt(conf.getString(Constants.ZOOKEEPER_CONNECTION_TIMEOUT))) - .build(); - - zkClient.start(); - initStateLister(); - - }catch(Exception e){ - logger.error("create zookeeper connect failed : " + e.getMessage(),e); - System.exit(-1); - } - } - - /** - * - * register status monitoring events for zookeeper clients - */ - public void initStateLister(){ - if(zkClient == null) { - return; - } - // add ConnectionStateListener monitoring zookeeper connection state - ConnectionStateListener csLister = new ConnectionStateListener() { - - @Override - public void stateChanged(CuratorFramework client, ConnectionState newState) { - logger.info("state changed , current state : " + newState.name()); - /** - * probably session expired - */ - if(newState == ConnectionState.LOST){ - // if lost , then exit - logger.info("current zookeepr connection state : connection lost "); - } - } - }; - - zkClient.getConnectionStateListenable().addListener(csLister); - } - - - public void start() { - zkClient.start(); - logger.info("zookeeper start ..."); - } - - public void close() { - zkClient.getZookeeperClient().close(); - zkClient.close(); - logger.info("zookeeper close ..."); - } - - - /** - * heartbeat for zookeeper - * @param znode - */ - public void heartBeatForZk(String znode, String serverType){ - try { - - //check dead or not in zookeeper - if(zkClient.getState() == CuratorFrameworkState.STOPPED || checkIsDeadServer(znode, serverType)){ - stoppable.stop("i was judged to death, release resources and stop myself"); - return; - } - - byte[] bytes = 
zkClient.getData().forPath(znode); - String resInfoStr = new String(bytes); - String[] splits = resInfoStr.split(Constants.COMMA); - if (splits.length != Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH){ - return; - } - String str = splits[0] + Constants.COMMA +splits[1] + Constants.COMMA - + OSUtils.cpuUsage() + Constants.COMMA - + OSUtils.memoryUsage() + Constants.COMMA - + splits[4] + Constants.COMMA - + DateUtils.dateToString(new Date()); - zkClient.setData().forPath(znode,str.getBytes()); - - } catch (Exception e) { - logger.error("heartbeat for zk failed : " + e.getMessage(), e); - stoppable.stop("heartbeat for zk exception, release resources and stop myself"); - } - } - - /** - * check dead server or not , if dead, stop self - * - * @param zNode node path - * @param serverType master or worker prefix - * @throws Exception - */ - protected boolean checkIsDeadServer(String zNode, String serverType) throws Exception { - //ip_sequenceno - String[] zNodesPath = zNode.split("\\/"); - String ipSeqNo = zNodesPath[zNodesPath.length - 1]; - - String type = serverType.equals(MASTER_PREFIX) ? MASTER_PREFIX : WORKER_PREFIX; - String deadServerPath = getDeadZNodeParentPath() + SINGLE_SLASH + type + UNDERLINE + ipSeqNo; - - if(zkClient.checkExists().forPath(zNode) == null || - zkClient.checkExists().forPath(deadServerPath) != null ){ - return true; - } - - - return false; - } - - - public void removeDeadServerByHost(String host, String serverType) throws Exception { - List deadServers = zkClient.getChildren().forPath(getDeadZNodeParentPath()); - for(String serverPath : deadServers){ - if(serverPath.startsWith(serverType+UNDERLINE+host)){ - String server = getDeadZNodeParentPath() + SINGLE_SLASH + serverPath; - zkClient.delete().forPath(server); - logger.info("{} server {} deleted from zk dead server path success" , serverType , host); - } - } - } - - - /** - * create zookeeper path according the zk node type. 
- * @param zkNodeType - * @return - * @throws Exception - */ - private String createZNodePath(ZKNodeType zkNodeType) throws Exception { - // specify the format of stored data in ZK nodes - String heartbeatZKInfo = ResInfo.getHeartBeatInfo(new Date()); - // create temporary sequence nodes for master znode - String parentPath = getZNodeParentPath(zkNodeType); - String serverPathPrefix = parentPath + "/" + OSUtils.getHost(); - String registerPath = zkClient.create().withMode(CreateMode.EPHEMERAL_SEQUENTIAL).forPath( - serverPathPrefix + "_", heartbeatZKInfo.getBytes()); - logger.info("register {} node {} success" , zkNodeType.toString(), registerPath); - return registerPath; - } - - /** - * register server, if server already exists, return null. - * @param zkNodeType - * @return register server path in zookeeper - */ - public String registerServer(ZKNodeType zkNodeType) throws Exception { - String registerPath = null; - String host = OSUtils.getHost(); - if(checkZKNodeExists(host, zkNodeType)){ - logger.error("register failure , {} server already started on host : {}" , - zkNodeType.toString(), host); - return registerPath; - } - registerPath = createZNodePath(zkNodeType); - logger.info("register {} node {} success", zkNodeType.toString(), registerPath); - - // handle dead server - handleDeadServer(registerPath, zkNodeType, Constants.DELETE_ZK_OP); - - return registerPath; - } - - /** - * opType(add): if find dead server , then add to zk deadServerPath - * opType(delete): delete path from zk - * - * @param zNode node path - * @param zkNodeType master or worker - * @param opType delete or add - * @throws Exception - */ - public void handleDeadServer(String zNode, ZKNodeType zkNodeType, String opType) throws Exception { - //ip_sequenceno - String[] zNodesPath = zNode.split("\\/"); - String ipSeqNo = zNodesPath[zNodesPath.length - 1]; - - String type = (zkNodeType == ZKNodeType.MASTER) ? 
MASTER_PREFIX : WORKER_PREFIX; - - - //check server restart, if restart , dead server path in zk should be delete - if(opType.equals(DELETE_ZK_OP)){ - String[] ipAndSeqNo = ipSeqNo.split(UNDERLINE); - String ip = ipAndSeqNo[0]; - removeDeadServerByHost(ip, type); - - }else if(opType.equals(ADD_ZK_OP)){ - String deadServerPath = getDeadZNodeParentPath() + SINGLE_SLASH + type + UNDERLINE + ipSeqNo; - if(zkClient.checkExists().forPath(deadServerPath) == null){ - //add dead server info to zk dead server path : /dead-servers/ - - zkClient.create().forPath(deadServerPath,(type + UNDERLINE + ipSeqNo).getBytes()); - - logger.info("{} server dead , and {} added to zk dead server path success" , - zkNodeType.toString(), zNode); - } - } - - } - - - - /** - * for stop server - * @param serverStoppable - */ - public void setStoppable(IStoppable serverStoppable){ - this.stoppable = serverStoppable; - } - - /** - * get active master num - * @return - */ - public int getActiveMasterNum(){ - List childrenList = new ArrayList<>(); - try { - // read master node parent path from conf - if(zkClient.checkExists().forPath(getZNodeParentPath(ZKNodeType.MASTER)) != null){ - childrenList = zkClient.getChildren().forPath(getZNodeParentPath(ZKNodeType.MASTER)); - } - } catch (Exception e) { - if(e.getMessage().contains("java.lang.IllegalStateException: instance must be started")){ - logger.error("zookeeper service not started",e); - }else{ - logger.error(e.getMessage(),e); - } - - }finally { - return childrenList.size(); - } - } - - /** - * - * @return zookeeper quorum - */ - public static String getZookeeperQuorum(){ - StringBuilder sb = new StringBuilder(); - String[] zookeeperParamslist = conf.getStringArray(Constants.ZOOKEEPER_QUORUM); - for (String param : zookeeperParamslist) { - sb.append(param).append(Constants.COMMA); - } - - if(sb.length() > 0){ - sb.deleteCharAt(sb.length() - 1); - } - - return sb.toString(); - } - - /** - * get server list. 
- * @param zkNodeType - * @return - */ - public List getServersList(ZKNodeType zkNodeType){ - Map masterMap = getServerMaps(zkNodeType); - String parentPath = getZNodeParentPath(zkNodeType); - - List masterServers = new ArrayList<>(); - int i = 0; - for(String path : masterMap.keySet()){ - MasterServer masterServer = ResInfo.parseHeartbeatForZKInfo(masterMap.get(path)); - masterServer.setZkDirectory( parentPath + "/"+ path); - masterServer.setId(i); - i ++; - masterServers.add(masterServer); - } - return masterServers; - } - - /** - * get master server list map. - * result : {host : resource info} - * @return - */ - public Map getServerMaps(ZKNodeType zkNodeType){ - - Map masterMap = new HashMap<>(); - try { - String path = getZNodeParentPath(zkNodeType); - List serverList = getZkClient().getChildren().forPath(path); - for(String server : serverList){ - byte[] bytes = getZkClient().getData().forPath(path + "/" + server); - masterMap.putIfAbsent(server, new String(bytes)); - } - } catch (Exception e) { - logger.error("get server list failed : " + e.getMessage(), e); - } - - return masterMap; - } - - /** - * check the zookeeper node already exists - * @param host - * @param zkNodeType - * @return - * @throws Exception - */ - public boolean checkZKNodeExists(String host, ZKNodeType zkNodeType) { - String path = getZNodeParentPath(zkNodeType); - if(StringUtils.isEmpty(path)){ - logger.error("check zk node exists error, host:{}, zk node type:{}", - host, zkNodeType.toString()); - return false; - } - Map serverMaps = getServerMaps(zkNodeType); - for(String hostKey : serverMaps.keySet()){ - if(hostKey.startsWith(host)){ - return true; - } - } - return false; - } - - /** - * get zkclient - * @return - */ - public CuratorFramework getZkClient() { - return zkClient; - } - - /** - * get worker node parent path - * @return - */ - protected String getWorkerZNodeParentPath(){return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_WORKERS);}; - - /** - * get master node parent path 
- * @return - */ - protected String getMasterZNodeParentPath(){return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_MASTERS);} - - /** - * get master lock path - * @return - */ - public String getMasterLockPath(){ - return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_LOCK_MASTERS); - } - - /** - * get zookeeper node parent path - * @param zkNodeType - * @return - */ - public String getZNodeParentPath(ZKNodeType zkNodeType) { - String path = ""; - switch (zkNodeType){ - case MASTER: - return getMasterZNodeParentPath(); - case WORKER: - return getWorkerZNodeParentPath(); - case DEAD_SERVER: - return getDeadZNodeParentPath(); - default: - break; - } - return path; - } - - /** - * get dead server node parent path - * @return - */ - protected String getDeadZNodeParentPath(){ - return conf.getString(ZOOKEEPER_ESCHEDULER_DEAD_SERVERS); - } - - /** - * get master start up lock path - * @return - */ - public String getMasterStartUpLockPath(){ - return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS); - } - - /** - * get master failover lock path - * @return - */ - public String getMasterFailoverLockPath(){ - return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_MASTERS); - } - - /** - * get worker failover lock path - * @return - */ - public String getWorkerFailoverLockPath(){ - return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_LOCK_FAILOVER_WORKERS); - } - - /** - * release mutex - * @param mutex - */ - public static void releaseMutex(InterProcessMutex mutex) { - if (mutex != null){ - try { - mutex.release(); - } catch (Exception e) { - if(e.getMessage().equals("instance must be started before calling this method")){ - logger.warn("lock release"); - }else{ - logger.error("lock release failed : " + e.getMessage(),e); - } - - } - } - } - - /** - * init system znode - */ - protected void initSystemZNode(){ - try { - createNodePath(getMasterZNodeParentPath()); - createNodePath(getWorkerZNodeParentPath()); - 
createNodePath(getDeadZNodeParentPath()); - - } catch (Exception e) { - logger.error("init system znode failed : " + e.getMessage(),e); - } - } - - /** - * create zookeeper node path if not exists - * @param zNodeParentPath - * @throws Exception - */ - private void createNodePath(String zNodeParentPath) throws Exception { - if(null == zkClient.checkExists().forPath(zNodeParentPath)){ - zkClient.create().creatingParentContainersIfNeeded() - .withMode(CreateMode.PERSISTENT).forPath(zNodeParentPath); - } - } - - /** - * server self dead, stop all threads - * @param serverHost - * @param zkNodeType - */ - protected boolean checkServerSelfDead(String serverHost, ZKNodeType zkNodeType) { - if (serverHost.equals(OSUtils.getHost())) { - logger.error("{} server({}) of myself dead , stopping...", - zkNodeType.toString(), serverHost); - stoppable.stop(String.format(" {} server {} of myself dead , stopping...", - zkNodeType.toString(), serverHost)); - return true; - } - return false; - } - - /** - * get host ip, string format: masterParentPath/ip_000001/value - * @param path - * @return - */ - protected String getHostByEventDataPath(String path) { - int startIndex = path.lastIndexOf("/")+1; - int endIndex = path.lastIndexOf("_"); - - if(startIndex >= endIndex){ - logger.error("parse ip error"); - return ""; - } - return path.substring(startIndex, endIndex); - } - /** - * acquire zk lock - * @param zkClient - * @param zNodeLockPath - * @throws Exception - */ - public InterProcessMutex acquireZkLock(CuratorFramework zkClient,String zNodeLockPath)throws Exception{ - InterProcessMutex mutex = new InterProcessMutex(zkClient, zNodeLockPath); - mutex.acquire(); - return mutex; - } - - @Override - public String toString() { - return "AbstractZKClient{" + - "zkClient=" + zkClient + - ", deadServerZNodeParentPath='" + getZNodeParentPath(ZKNodeType.DEAD_SERVER) + '\'' + - ", masterZNodeParentPath='" + getZNodeParentPath(ZKNodeType.MASTER) + '\'' + - ", workerZNodeParentPath='" + 
getZNodeParentPath(ZKNodeType.WORKER) + '\'' + - ", stoppable=" + stoppable + - '}'; - } -} diff --git a/escheduler-common/src/test/java/cn/escheduler/common/graph/DAGTest.java b/escheduler-common/src/test/java/cn/escheduler/common/graph/DAGTest.java deleted file mode 100644 index 910181baf0..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/graph/DAGTest.java +++ /dev/null @@ -1,356 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.graph; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import static org.junit.Assert.*; - -public class DAGTest { - private DAG graph; - private static final Logger logger = LoggerFactory.getLogger(DAGTest.class); - - @Before - public void setup() { - graph = new DAG<>(); - } - - @After - public void tearDown() { - clear(); - } - - private void clear() { - graph = null; - graph = new DAG<>(); - - assertEquals(graph.getNodesCount(), 0); - } - - - private void makeGraph() { - clear(); - - // 1->2 - // 2->5 - // 3->5 - // 4->6 - // 5->6 - // 6->7 - - for (int i = 1; i <= 7; ++i) { - graph.addNode(i, "v(" + i + ")"); - } - - // 构造边 - assertTrue(graph.addEdge(1, 2)); - - assertTrue(graph.addEdge(2, 5)); - - assertTrue(graph.addEdge(3, 5)); - - assertTrue(graph.addEdge(4, 6)); - - assertTrue(graph.addEdge(5, 6)); - - assertTrue(graph.addEdge(6, 7)); - - assertEquals(graph.getNodesCount(), 7); - assertEquals(graph.getEdgesCount(), 6); - - } - - - /** - * 测试增加顶点 - */ - @Test - public void testAddNode() { - clear(); - - graph.addNode(1, "v(1)"); - graph.addNode(2, null); - graph.addNode(5, "v(5)"); - - assertEquals(graph.getNodesCount(), 3); - - assertEquals(graph.getNode(1), "v(1)"); - assertTrue(graph.containsNode(1)); - - assertFalse(graph.containsNode(10)); - } - - - /** - * 添加边 - */ - @Test - public void testAddEdge() { - clear(); - - assertFalse(graph.addEdge(1, 2, "edge(1 -> 2)", false)); - - graph.addNode(1, "v(1)"); - - assertTrue(graph.addEdge(1, 2, "edge(1 -> 2)",true)); - - graph.addNode(2, "v(2)"); - - assertTrue(graph.addEdge(1, 2, "edge(1 -> 2)",true)); - - assertFalse(graph.containsEdge(1, 3)); - - assertTrue(graph.containsEdge(1, 2)); - assertEquals(graph.getEdgesCount(), 1); - - } - - - /** - * 测试后续结点 - */ - @Test - public void testSubsequentNodes() { 
- makeGraph(); - - assertEquals(graph.getSubsequentNodes(1).size(), 1); - - } - - - /** - * 测试入度 - */ - @Test - public void testIndegree() { - makeGraph(); - - assertEquals(graph.getIndegree(1), 0); - assertEquals(graph.getIndegree(2), 1); - assertEquals(graph.getIndegree(3), 0); - assertEquals(graph.getIndegree(4), 0); - } - - - /** - * 测试起点 - */ - @Test - public void testBeginNode() { - makeGraph(); - - assertEquals(graph.getBeginNode().size(), 3); - - assertTrue(graph.getBeginNode().contains(1)); - assertTrue(graph.getBeginNode().contains(3)); - assertTrue(graph.getBeginNode().contains(4)); - } - - - /** - * 测试终点 - */ - @Test - public void testEndNode() { - makeGraph(); - - assertEquals(graph.getEndNode().size(), 1); - - assertTrue(graph.getEndNode().contains(7)); - } - - - /** - * 测试环 - */ - @Test - public void testCycle() { - clear(); - - // 构造顶点 - for (int i = 1; i <= 5; ++i) { - graph.addNode(i, "v(" + i + ")"); - } - - // 构造边, 1->2, 2->3, 3->4 - try { - graph.addEdge(1, 2); - graph.addEdge(2, 3); - graph.addEdge(3, 4); - - assertFalse(graph.hasCycle()); - } catch (Exception e) { - e.printStackTrace(); - fail(); - } - - - try { - boolean addResult = graph.addEdge(4, 1);//有环,添加失败 - - if(!addResult){//有环,添加失败 - assertTrue(true); - } - - graph.addEdge(5, 1); - - assertFalse(graph.hasCycle()); - } catch (Exception e) { - e.printStackTrace(); - fail(); - } - - // 重新清空 - clear(); - - // 构造顶点 - for (int i = 1; i <= 5; ++i) { - graph.addNode(i, "v(" + i +")"); - } - - // 构造边, 1->2, 2->3, 3->4 - try { - graph.addEdge(1, 2); - graph.addEdge(2, 3); - graph.addEdge(3, 4); - graph.addEdge(4, 5); - graph.addEdge(5, 2);//会失败,添加不进去,所以下一步无环 - - assertFalse(graph.hasCycle()); - } catch (Exception e) { - e.printStackTrace(); - fail(); - } - } - - - @Test - public void testTopologicalSort(){ - makeGraph(); - - try { - List topoList = new ArrayList<>();//一种拓扑结果是1 3 4 2 5 6 7 - topoList.add(1); - topoList.add(3); - topoList.add(4); - topoList.add(2); - topoList.add(5); - 
topoList.add(6); - topoList.add(7); - - assertEquals(graph.topologicalSort(),topoList); - } catch (Exception e) { - e.printStackTrace(); - fail(); - } - } - - - @Test - public void testTopologicalSort2() { - clear(); - - graph.addEdge(1, 2, null, true); - graph.addEdge(2, 3, null, true); - graph.addEdge(3, 4, null, true); - graph.addEdge(4, 5, null, true); - graph.addEdge(5, 1, null, false); //因环会添加失败,ERROR级别日志输出 - - try { - List topoList = new ArrayList<>();//拓扑结果是1 2 3 4 5 - topoList.add(1); - topoList.add(2); - topoList.add(3); - topoList.add(4); - topoList.add(5); - - assertEquals(graph.topologicalSort(),topoList); - - } catch (Exception e) { - e.printStackTrace(); - fail(); - } - - } - - - /** - * - */ - @Test - public void testTopologicalSort3() throws Exception { - clear(); - - // 1->2 - // 1->3 - // 2->5 - // 3->4 - // 4->6 - // 5->6 - // 6->7 - // 6->8 - - for (int i = 1; i <= 8; ++i) { - graph.addNode(i, "v(" + i + ")"); - } - - // 构造边 - assertTrue(graph.addEdge(1, 2)); - - assertTrue(graph.addEdge(1, 3)); - - assertTrue(graph.addEdge(2, 5)); - assertTrue(graph.addEdge(3, 4)); - - assertTrue(graph.addEdge(4, 6)); - - assertTrue(graph.addEdge(5, 6)); - - assertTrue(graph.addEdge(6, 7)); - assertTrue(graph.addEdge(6, 8)); - - - - - assertEquals(graph.getNodesCount(), 8); - - logger.info(Arrays.toString(graph.topologicalSort().toArray())); - - List expectedList = new ArrayList<>(); - - for (int i = 1; i <= 8; ++i) { - expectedList.add(i); - - logger.info(i + " subsequentNodes : " + graph.getSubsequentNodes(i)); - } - -// assertArrayEquals(expectedList.toArray(),graph.topologicalSort().toArray()); - - logger.info(6 + " previousNodesb: " + graph.getPreviousNodes(6)); - assertEquals(5, graph.getSubsequentNodes(2).toArray()[0]); - - } - -} diff --git a/escheduler-common/src/test/java/cn/escheduler/common/os/OSUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/os/OSUtilsTest.java deleted file mode 100644 index 4bf152bbf2..0000000000 --- 
a/escheduler-common/src/test/java/cn/escheduler/common/os/OSUtilsTest.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.os; - - -import cn.escheduler.common.utils.OSUtils; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import oshi.hardware.GlobalMemory; - -import java.math.RoundingMode; -import java.text.DecimalFormat; - - -/** - * OSUtilsTest - */ -public class OSUtilsTest { - - private static Logger logger = LoggerFactory.getLogger(OSUtilsTest.class); -// static SystemInfo si = new SystemInfo(); -// static HardwareAbstractionLayer hal = si.getHardware(); - - - @Test - public void getHost(){ - logger.info(OSUtils.getHost()); - } - - - @Test - public void memoryUsage() { - logger.info("memoryUsage : {}", OSUtils.memoryUsage());// 0.3361799418926239 -// printMemory(hal.getMemory());// 35 % - } - - @Test - public void availablePhysicalMemorySize() { - logger.info("availablePhysicalMemorySize : {}", OSUtils.availablePhysicalMemorySize()); - logger.info("availablePhysicalMemorySize : {}", OSUtils.totalMemorySize() / 10); - } - - - @Test - public void loadAverage() { - logger.info("memoryUsage : {}", 
OSUtils.loadAverage()); - } - - - private void printMemory(GlobalMemory memory) { - logger.info("memoryUsage : {} %" , (memory.getTotal() - memory.getAvailable()) * 100 / memory.getTotal() ); - } - - - @Test - public void cpuUsage() throws Exception { - logger.info("cpuUsage : {}", OSUtils.cpuUsage()); - Thread.sleep(1000l); - logger.info("cpuUsage : {}", OSUtils.cpuUsage()); - - double cpuUsage = OSUtils.cpuUsage(); - - DecimalFormat df = new DecimalFormat("0.00"); - - df.setRoundingMode(RoundingMode.HALF_UP); - - logger.info("cpuUsage1 : {}", df.format(cpuUsage)); - } - - -// -// @Test -// public void getUserList() { -// logger.info("getUserList : {}", OSUtils.getUserList()); -// } -// -// -// @Test -// public void getGroup() throws Exception { -// logger.info("getGroup : {}", OSUtils.getGroup()); -// logger.info("getGroup : {}", OSUtils.exeShell("groups")); -// -// -// } -// -// -// @Test -// public void getProcessID() { -// logger.info("getProcessID : {}", OSUtils.getProcessID()); -// } -// -// -// @Test -// public void getHost() { -// logger.info("getHost : {}", OSUtils.getHost()); -// } -// -// -// -// @Test -// public void anotherGetOsInfoTest() throws InterruptedException { -// OperatingSystemMXBean os = ManagementFactory.getPlatformMXBean(OperatingSystemMXBean.class); -// final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean(); -// -// MemoryUsage memoryUsage = memoryMXBean.getHeapMemoryUsage(); -// double usage = (double)memoryUsage.getUsed() / (double)memoryUsage.getCommitted(); -// logger.info("memory usage : {}",usage); -// -// if (os instanceof UnixOperatingSystemMXBean) { -// UnixOperatingSystemMXBean unixOs = (UnixOperatingSystemMXBean) os; -// logger.info("getMaxFileDescriptorCount : {}" ,unixOs.getMaxFileDescriptorCount()); //10240 -// logger.info("getOpenFileDescriptorCount : {}",unixOs.getOpenFileDescriptorCount()); //241 -// logger.info("getAvailableProcessors : {}",unixOs.getAvailableProcessors()); //8 -// -// 
logger.info("getSystemLoadAverage : {}",unixOs.getSystemLoadAverage()); //1.36083984375 -// -// logger.info("getFreePhysicalMemorySize : {}",unixOs.getFreePhysicalMemorySize()); //209768448 -// -// logger.info("getTotalPhysicalMemorySize : {}",unixOs.getTotalPhysicalMemorySize()); //17179869184 16G -// -// for(int i = 0; i < 3; i++) { -// logger.info("getSystemCpuLoad : {}", unixOs.getSystemCpuLoad()); //0.0 -// -// logger.info("getProcessCpuLoad : {}", unixOs.getProcessCpuLoad() * 10); //0.0 -// Thread.sleep(1000l); -// } -// } -// } -// - -} diff --git a/escheduler-common/src/test/java/cn/escheduler/common/os/OshiTest.java b/escheduler-common/src/test/java/cn/escheduler/common/os/OshiTest.java deleted file mode 100644 index d27a111671..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/os/OshiTest.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.os; - - -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import oshi.SystemInfo; -import oshi.hardware.CentralProcessor; -import oshi.hardware.CentralProcessor.TickType; -import oshi.hardware.GlobalMemory; -import oshi.hardware.HardwareAbstractionLayer; -import oshi.util.FormatUtil; -import oshi.util.Util; - -import java.util.Arrays; - - -/** - * os information test - */ -public class OshiTest { - - private static Logger logger = LoggerFactory.getLogger(OshiTest.class); - - - @Test - public void test() { - - SystemInfo si = new SystemInfo(); - - HardwareAbstractionLayer hal = si.getHardware(); - - logger.info("Checking Memory..."); - printMemory(hal.getMemory()); - - - logger.info("Checking CPU..."); - printCpu(hal.getProcessor()); - - } - - - - private static void printMemory(GlobalMemory memory) { - - logger.info("memory avail:{} MB" , memory.getAvailable() / 1024 / 1024 );//memory avail:6863 MB - logger.info("memory total:{} MB" , memory.getTotal() / 1024 / 1024 );//memory total:16384 MB - } - - - private static void printCpu(CentralProcessor processor) { - logger.info(String.format("CPU load: %.1f%% (OS MXBean)%n", processor.getSystemCpuLoad() * 100));//CPU load: 24.9% (OS MXBean) - logger.info("CPU load averages : {}", processor.getSystemLoadAverage());//CPU load averages : 1.5234375 - - - logger.info("Uptime: " + FormatUtil.formatElapsedSecs(processor.getSystemUptime())); - logger.info("Context Switches/Interrupts: " + processor.getContextSwitches() + " / " + processor.getInterrupts()); - - - long[] prevTicks = processor.getSystemCpuLoadTicks(); - logger.info("CPU, IOWait, and IRQ ticks @ 0 sec:" + Arrays.toString(prevTicks)); - //Wait a second... 
- Util.sleep(1000); - long[] ticks = processor.getSystemCpuLoadTicks(); - logger.info("CPU, IOWait, and IRQ ticks @ 1 sec:" + Arrays.toString(ticks)); - long user = ticks[TickType.USER.getIndex()] - prevTicks[TickType.USER.getIndex()]; - long nice = ticks[TickType.NICE.getIndex()] - prevTicks[TickType.NICE.getIndex()]; - long sys = ticks[TickType.SYSTEM.getIndex()] - prevTicks[TickType.SYSTEM.getIndex()]; - long idle = ticks[TickType.IDLE.getIndex()] - prevTicks[TickType.IDLE.getIndex()]; - long iowait = ticks[TickType.IOWAIT.getIndex()] - prevTicks[TickType.IOWAIT.getIndex()]; - long irq = ticks[TickType.IRQ.getIndex()] - prevTicks[TickType.IRQ.getIndex()]; - long softirq = ticks[TickType.SOFTIRQ.getIndex()] - prevTicks[TickType.SOFTIRQ.getIndex()]; - long steal = ticks[TickType.STEAL.getIndex()] - prevTicks[TickType.STEAL.getIndex()]; - long totalCpu = user + nice + sys + idle + iowait + irq + softirq + steal; - - logger.info(String.format( - "User: %.1f%% Nice: %.1f%% System: %.1f%% Idle: %.1f%% IOwait: %.1f%% IRQ: %.1f%% SoftIRQ: %.1f%% Steal: %.1f%%%n", - 100d * user / totalCpu, 100d * nice / totalCpu, 100d * sys / totalCpu, 100d * idle / totalCpu, - 100d * iowait / totalCpu, 100d * irq / totalCpu, 100d * softirq / totalCpu, 100d * steal / totalCpu)); - logger.info(String.format("CPU load: %.1f%% (counting ticks)%n", processor.getSystemCpuLoadBetweenTicks() * 100)); - - - - double[] loadAverage = processor.getSystemLoadAverage(3); - logger.info("CPU load averages:" + (loadAverage[0] < 0 ? " N/A" : String.format(" %.2f", loadAverage[0])) - + (loadAverage[1] < 0 ? " N/A" : String.format(" %.2f", loadAverage[1])) - + (loadAverage[2] < 0 ? 
" N/A" : String.format(" %.2f", loadAverage[2]))); - // per core CPU - StringBuilder procCpu = new StringBuilder("CPU load per processor:"); - double[] load = processor.getProcessorCpuLoadBetweenTicks(); - for (double avg : load) { - procCpu.append(String.format(" %.1f%%", avg * 100)); - } - logger.info(procCpu.toString()); - } -} diff --git a/escheduler-common/src/test/java/cn/escheduler/common/queue/TaskQueueImplTest.java b/escheduler-common/src/test/java/cn/escheduler/common/queue/TaskQueueImplTest.java deleted file mode 100644 index 7b59309ff6..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/queue/TaskQueueImplTest.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.queue; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.utils.IpUtils; -import cn.escheduler.common.utils.OSUtils; -import cn.escheduler.common.zk.StandaloneZKServerForTest; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; -import java.util.Random; - -import static org.junit.Assert.assertEquals; - -/** - * task queue test - */ -public class TaskQueueImplTest extends StandaloneZKServerForTest { - - private static final Logger logger = LoggerFactory.getLogger(TaskQueueImplTest.class); - - ITaskQueue tasksQueue = null; - - @Before - public void before(){ - super.before(); - - tasksQueue = TaskQueueFactory.getTaskQueueInstance(); - - //clear all data - tasksQueue.delete(); - - } - - - @After - public void after(){ - //clear all data - tasksQueue.delete(); - } - - - @Test - public void testAdd(){ - - - //add - tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE,"1_0_1_1_-1"); - tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE,"0_1_1_1_-1"); - tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE,"0_0_0_1_" + IpUtils.ipToLong(OSUtils.getHost())); - tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE,"1_2_1_1_" + IpUtils.ipToLong(OSUtils.getHost()) + 10); - - List tasks = tasksQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, 1); - - if(tasks.size() <= 0){ - return; - } - - //pop - String node1 = tasks.get(0); - - assertEquals(node1,"0_0_0_1_" + IpUtils.ipToLong(OSUtils.getHost())); - - - } - - - - /** - * test one million data from zookeeper queue - */ - @Ignore - @Test - public void extremeTest(){ - int total = 30 * 10000; - - for(int i = 0; i < total; i++) - { - for(int j = 0; j < total; j++) { - //${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - //format ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - String formatTask = 
String.format("%s_%d_%s_%d", i, i + 1, j, j == 0 ? 0 : j + new Random().nextInt(100)); - tasksQueue.add(Constants.SCHEDULER_TASKS_QUEUE, formatTask); - } - } - - String node1 = tasksQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, 1).get(0); - assertEquals(node1,"0"); - - } - -} diff --git a/escheduler-common/src/test/java/cn/escheduler/common/shell/ShellExecutorTest.java b/escheduler-common/src/test/java/cn/escheduler/common/shell/ShellExecutorTest.java deleted file mode 100644 index fe80708756..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/shell/ShellExecutorTest.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.shell; - -import cn.escheduler.common.thread.ThreadPoolExecutors; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.concurrent.CountDownLatch; - -public class ShellExecutorTest { - private static final Logger logger = LoggerFactory.getLogger(ShellExecutorTest.class); - - @Test - public void execCommand() throws InterruptedException { - - ThreadPoolExecutors executors = ThreadPoolExecutors.getInstance(); - CountDownLatch latch = new CountDownLatch(200); - - executors.execute(new Runnable() { - @Override - public void run() { - - try { - int i =0; - while(i++ <= 100){ - String res = ShellExecutor.execCommand("groups"); - logger.info("time:" + i + ",thread id:" + Thread.currentThread().getId() + ", result:" + res.substring(0,5)); - Thread.sleep(100l); - latch.countDown(); - } - - } catch (IOException | InterruptedException e) { - e.printStackTrace(); - } - } - }); - - executors.execute(new Runnable() { - @Override - public void run() { - - try { - int i =0; - while(i++ <= 100){ - String res = ShellExecutor.execCommand("whoami"); - logger.info("time:" + i + ",thread id:" + Thread.currentThread().getId() + ", result2:" + res); - Thread.sleep(100l); - latch.countDown(); - } - - } catch (IOException | InterruptedException e) { - e.printStackTrace(); - } - } - }); - - latch.await(); - } -} \ No newline at end of file diff --git a/escheduler-common/src/test/java/cn/escheduler/common/threadutils/ThreadPoolExecutorsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/threadutils/ThreadPoolExecutorsTest.java deleted file mode 100644 index 86f9642ed6..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/threadutils/ThreadPoolExecutorsTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.threadutils; - -import cn.escheduler.common.thread.ThreadPoolExecutors; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ThreadPoolExecutorsTest { - - private static final Logger logger = LoggerFactory.getLogger(ThreadPoolExecutors.class); - - - @Test - public void testThreadPoolExecutors() throws InterruptedException { - - Thread2[] threadArr = new Thread2[10]; - for (int i = 0; i < threadArr.length; i++) { - - threadArr[i] = new Thread2(); - threadArr[i].setDaemon(false); - threadArr[i].start(); - } - - Thread.currentThread().join(40000l); - } - - - //test thread - class Thread2 extends Thread { - @Override - public void run() { - logger.info(String.format("ThreadPoolExecutors instance's hashcode is: %s ",ThreadPoolExecutors.getInstance("a",2).hashCode())); - } - } - - -} diff --git a/escheduler-common/src/test/java/cn/escheduler/common/utils/CollectionUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/CollectionUtilsTest.java deleted file mode 100644 index 73983115b1..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/utils/CollectionUtilsTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more 
- * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - - -public class CollectionUtilsTest { - - @Test - public void equalLists() { - List a = new ArrayList(); - - a.add(1); - a.add(2); - a.add(3); - - List b = new ArrayList(); - b.add(3); - b.add(2); - b.add(1); - - Assert.assertTrue(CollectionUtils.equalLists(a,b)); - - } - - @Test - public void subtract() { - Set a = new HashSet(); - - a.add(1); - a.add(2); - a.add(3); - - Set b = new HashSet(); - b.add(0); - b.add(2); - b.add(4); - - - Assert.assertArrayEquals(new Integer[]{1,3},CollectionUtils.subtract(a,b).toArray()); - } -} \ No newline at end of file diff --git a/escheduler-common/src/test/java/cn/escheduler/common/utils/CommonUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/CommonUtilsTest.java deleted file mode 100644 index f4643e1847..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/utils/CommonUtilsTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.net.InetAddress; -import java.net.UnknownHostException; - -/** - * configuration test - */ -public class CommonUtilsTest { - private static final Logger logger = LoggerFactory.getLogger(CommonUtilsTest.class); - @Test - public void getHdfsDataBasePath() { - logger.info(HadoopUtils.getHdfsDataBasePath()); - } - - @Test - public void getDownloadFilename() { - logger.info(FileUtils.getDownloadFilename("a.txt")); - } - - @Test - public void getUploadFilename() { - logger.info(FileUtils.getUploadFilename("1234", "a.txt")); - } - - @Test - public void getHdfsDir() { - logger.info(HadoopUtils.getHdfsResDir("1234")); - } - - @Test - public void test(){ - InetAddress IP = null; - try { - IP = InetAddress.getLocalHost(); - logger.info(IP.getHostAddress()); - } catch (UnknownHostException e) { - e.printStackTrace(); - } - } -} \ No newline at end of file diff --git a/escheduler-common/src/test/java/cn/escheduler/common/utils/DateUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/DateUtilsTest.java deleted file mode 100644 index 54ce1a3c9c..0000000000 --- 
a/escheduler-common/src/test/java/cn/escheduler/common/utils/DateUtilsTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.junit.Assert; -import org.junit.Test; - -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Date; - -public class DateUtilsTest { - - @Test - public void format2Readable() throws ParseException { - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - - String start = "2015-12-21 18:00:36"; - Date startDate = sdf.parse(start); - - String end = "2015-12-23 03:23:44"; - Date endDate = sdf.parse(end); - - String readableDate = DateUtils.format2Readable(endDate.getTime() - startDate.getTime()); - - Assert.assertEquals("01 09:23:08", readableDate); - } - - - @Test - public void testWeek(){ - - Date curr = DateUtils.stringToDate("2019-02-01 00:00:00"); - Date monday1 = DateUtils.stringToDate("2019-01-28 00:00:00"); - Date sunday1 = DateUtils.stringToDate("2019-02-03 00:00:00"); - Date monday = DateUtils.getMonday(curr); - Date sunday = DateUtils.getSunday(monday); - - Assert.assertEquals(monday, monday1); - Assert.assertEquals(sunday, sunday1); - - } -} \ No 
newline at end of file diff --git a/escheduler-common/src/test/java/cn/escheduler/common/utils/DependentUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/DependentUtilsTest.java deleted file mode 100644 index 8680ab0cb4..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/utils/DependentUtilsTest.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.enums.DependResult; -import cn.escheduler.common.enums.DependentRelation; -import cn.escheduler.common.model.DateInterval; -import cn.escheduler.common.shell.ShellExecutorTest; -import cn.escheduler.common.utils.dependent.DependentDateUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -public class DependentUtilsTest { - private static final Logger logger = LoggerFactory.getLogger(ShellExecutorTest.class); - @Test - public void getDependResultForRelation() { - - DependentRelation dependentRelation = DependentRelation.AND; - List dependResultList = new ArrayList<>(); - dependResultList.add(DependResult.FAILED); - dependResultList.add(DependResult.SUCCESS); - DependResult result = DependentUtils.getDependResultForRelation( dependentRelation, dependResultList); - Assert.assertEquals(result, DependResult.FAILED); - - dependentRelation = DependentRelation.OR; - - Assert.assertEquals(DependentUtils.getDependResultForRelation( dependentRelation, dependResultList), - DependResult.SUCCESS); - } - - @Test - public void getDateIntervalList() { - - Date curDay = DateUtils.stringToDate("2019-02-05 00:00:00"); - - DateInterval diCur = new DateInterval(DateUtils.getStartOfDay(curDay), - DateUtils.getEndOfDay(curDay)); - - Date day1 = DateUtils.stringToDate("2019-02-04 00:00:00"); - DateInterval di1 = new DateInterval(DateUtils.getStartOfDay(day1), - DateUtils.getEndOfDay(day1)); - Date day2 = DateUtils.stringToDate("2019-02-03 00:00:00"); - DateInterval di2 = new DateInterval(DateUtils.getStartOfDay(day2), - DateUtils.getEndOfDay(day2)); - String dateValue = "last1Days"; - List dateIntervals = DependentUtils.getDateIntervalList(curDay, dateValue); - Assert.assertEquals(dateIntervals.get(0), di1); - - dateValue = "last2Days"; - dateIntervals = 
DependentUtils.getDateIntervalList(curDay, dateValue); - for(DateInterval dateInterval : dateIntervals){ - logger.info(dateInterval.getStartTime().toString() + " == " + dateInterval.getEndTime().toString()); - } - - Assert.assertEquals(dateIntervals.get(1), di1); - Assert.assertEquals(dateIntervals.get(0), di2); - - dateValue = "today"; - dateIntervals = DependentUtils.getDateIntervalList(curDay, dateValue); - Assert.assertEquals(dateIntervals.get(0), diCur); - - - dateValue = "thisWeek"; - Date firstWeekDay = DateUtils.getMonday(curDay); - dateIntervals = DependentUtils.getDateIntervalList(curDay, dateValue); - - DateInterval weekHead = new DateInterval(DateUtils.getStartOfDay(firstWeekDay), DateUtils.getEndOfDay(firstWeekDay)); - DateInterval weekThis = new DateInterval(DateUtils.getStartOfDay(curDay), DateUtils.getEndOfDay(curDay)); - - Assert.assertEquals(dateIntervals.get(0), weekHead); - Assert.assertEquals(dateIntervals.get(dateIntervals.size() - 1), weekThis); - - - dateValue = "thisMonth"; - Date firstMonthDay = DateUtils.getFirstDayOfMonth(curDay); - dateIntervals = DependentUtils.getDateIntervalList(curDay, dateValue); - - DateInterval monthHead = new DateInterval(DateUtils.getStartOfDay(firstMonthDay), DateUtils.getEndOfDay(firstMonthDay)); - DateInterval monthThis = new DateInterval(DateUtils.getStartOfDay(curDay), DateUtils.getEndOfDay(curDay)); - - Assert.assertEquals(dateIntervals.get(0), monthHead); - Assert.assertEquals(dateIntervals.get(dateIntervals.size() - 1), monthThis); - - } - - @Test - public void testWeek(){ - - Date curDay = DateUtils.stringToDate("2019-02-05 00:00:00"); - Date day1 = DateUtils.stringToDate("2019-01-28 00:00:00"); - DateInterval di1 = new DateInterval(DateUtils.getStartOfDay(day1), - DateUtils.getEndOfDay(day1)); - - Date day2 = DateUtils.stringToDate("2019-01-29 00:00:00"); - DateInterval di2 = new DateInterval(DateUtils.getStartOfDay(day2), - DateUtils.getEndOfDay(day2)); - Date day3 = 
DateUtils.stringToDate("2019-01-30 00:00:00"); - DateInterval di3 = new DateInterval(DateUtils.getStartOfDay(day3), - DateUtils.getEndOfDay(day3)); - Date day4 = DateUtils.stringToDate("2019-01-31 00:00:00"); - DateInterval di4 = new DateInterval(DateUtils.getStartOfDay(day4), - DateUtils.getEndOfDay(day4)); - Date day5 = DateUtils.stringToDate("2019-02-01 00:00:00"); - DateInterval di5 = new DateInterval(DateUtils.getStartOfDay(day5), - DateUtils.getEndOfDay(day5)); - Date day6 = DateUtils.stringToDate("2019-02-02 00:00:00"); - DateInterval di6 = new DateInterval(DateUtils.getStartOfDay(day6), - DateUtils.getEndOfDay(day6)); - Date day7 = DateUtils.stringToDate("2019-02-03 00:00:00"); - DateInterval di7 = new DateInterval(DateUtils.getStartOfDay(day7), - DateUtils.getEndOfDay(day7)); - List dateIntervals = DependentDateUtils.getLastWeekInterval(curDay); - Assert.assertEquals(dateIntervals.size(), 7); - Assert.assertEquals(dateIntervals.get(0), di1); - Assert.assertEquals(dateIntervals.get(1), di2); - Assert.assertEquals(dateIntervals.get(2), di3); - Assert.assertEquals(dateIntervals.get(3), di4); - - List monday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 1); - Assert.assertEquals(monday.get(0), di1); - List tuesday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 2); - Assert.assertEquals(tuesday.get(0), di2); - List wednesday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 3); - Assert.assertEquals(wednesday.get(0), di3); - List thursday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 4); - Assert.assertEquals(thursday.get(0), di4); - List friday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 5); - Assert.assertEquals(friday.get(0), di5); - List saturday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 6); - Assert.assertEquals(saturday.get(0), di6); - List sunday = DependentDateUtils.getLastWeekOneDayInterval(curDay, 7); - Assert.assertEquals(sunday.get(0), di7); - } - - @Test - public void testHour(){ - - Date 
curDay = DateUtils.stringToDate("2019-02-05 12:10:00"); - Date day1 = DateUtils.stringToDate("2019-02-05 11:00:00"); - DateInterval di1 = new DateInterval(DateUtils.getStartOfHour(day1), - DateUtils.getEndOfHour(day1)); - Date day2 = DateUtils.stringToDate("2019-02-05 10:00:00"); - DateInterval di2 = new DateInterval(DateUtils.getStartOfHour(day2), - DateUtils.getEndOfHour(day2)); - Date day3 = DateUtils.stringToDate("2019-02-05 09:00:00"); - DateInterval di3 = new DateInterval(DateUtils.getStartOfHour(day3), - DateUtils.getEndOfHour(day3)); - - List dateIntervals = DependentDateUtils.getLastHoursInterval(curDay, 1); - Assert.assertEquals(dateIntervals.get(0), di1); - dateIntervals = DependentDateUtils.getLastHoursInterval(curDay, 2); - Assert.assertEquals(dateIntervals.get(1), di1); - Assert.assertEquals(dateIntervals.get(0), di2); - dateIntervals = DependentDateUtils.getLastHoursInterval(curDay, 3); - Assert.assertEquals(dateIntervals.get(2), di1); - Assert.assertEquals(dateIntervals.get(1), di2); - Assert.assertEquals(dateIntervals.get(0), di3); - - } - - - @Test - public void testMonth(){ - Date curDay = DateUtils.stringToDate("2019-02-05 00:00:00"); - Date day1 = DateUtils.stringToDate("2019-01-01 00:00:00"); - DateInterval di1 = new DateInterval(DateUtils.getStartOfDay(day1), - DateUtils.getEndOfDay(day1)); - - Date day2 = DateUtils.stringToDate("2019-01-31 00:00:00"); - DateInterval di2 = new DateInterval(DateUtils.getStartOfDay(day2), - DateUtils.getEndOfDay(day2)); - - List dateIntervals = DependentDateUtils.getLastMonthInterval(curDay); - - Assert.assertEquals(dateIntervals.size(), 31); - Assert.assertEquals(dateIntervals.get(0), di1); - Assert.assertEquals(dateIntervals.get(30), di2); - } - -} \ No newline at end of file diff --git a/escheduler-common/src/test/java/cn/escheduler/common/utils/FileUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/FileUtilsTest.java deleted file mode 100644 index fb9d78ad23..0000000000 --- 
a/escheduler-common/src/test/java/cn/escheduler/common/utils/FileUtilsTest.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.junit.Assert; -import org.junit.Test; - -public class FileUtilsTest { - - @Test - public void suffix() { - Assert.assertEquals(FileUtils.suffix("ninfor.java"),"java"); - } -} \ No newline at end of file diff --git a/escheduler-common/src/test/java/cn/escheduler/common/utils/HadoopUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/HadoopUtilsTest.java deleted file mode 100644 index 47e76e4286..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/utils/HadoopUtilsTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.junit.Ignore; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.List; - -@Ignore -public class HadoopUtilsTest { - - private static final Logger logger = LoggerFactory.getLogger(HadoopUtilsTest.class); - - @Test - public void getActiveRMTest() { - logger.info(HadoopUtils.getAppAddress("http://ark1:8088/ws/v1/cluster/apps/%s","192.168.xx.xx,192.168.xx.xx")); - } - - @Test - public void getApplicationStatusAddressTest(){ - logger.info(HadoopUtils.getInstance().getApplicationUrl("application_1548381297012_0030")); - } - - @Test - public void test() throws IOException { - HadoopUtils.getInstance().copyLocalToHdfs("/root/teamviewer_13.1.8286.x86_64.rpm", "/journey", true, true); - } - - @Test - public void readFileTest(){ - try { - byte[] bytes = HadoopUtils.getInstance().catFile("/escheduler/hdfs/resources/35435.sh"); - logger.info("------------------start"); - logger.info(new String(bytes)); - logger.info("---------------------end"); - } catch (Exception e) { - - } - } - @Test - public void testCapacity(){ - - } - @Test - public void testMove(){ - HadoopUtils instance = HadoopUtils.getInstance(); - try { - instance.copy("/opt/apptest/test.dat","/opt/apptest/test.dat.back",true,true); - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - - - } - - @Test - public void getApplicationStatus() { - logger.info(HadoopUtils.getInstance().getApplicationStatus("application_1542010131334_0029").toString()); - } - - 
@Test - public void getApplicationUrl(){ - String application_1516778421218_0042 = HadoopUtils.getInstance().getApplicationUrl("application_1529051418016_0167"); - logger.info(application_1516778421218_0042); - } - - @Test - public void catFileTest()throws Exception{ - List stringList = HadoopUtils.getInstance().catFile("/escheduler/hdfs/resources/WCSparkPython.py", 0, 1000); - logger.info(String.join(",",stringList)); - } -} \ No newline at end of file diff --git a/escheduler-common/src/test/java/cn/escheduler/common/utils/HttpUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/HttpUtilsTest.java deleted file mode 100644 index df0f78405d..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/utils/HttpUtilsTest.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.utils; - -import com.alibaba.fastjson.JSON; -import com.alibaba.fastjson.JSONObject; -import org.junit.Ignore; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * HttpClient utils test - */ -public class HttpUtilsTest { - - - public static final Logger logger = LoggerFactory.getLogger(HttpUtilsTest.class); - - - @Ignore - @Test - public void getTest(){ - - String result = HttpUtils.get("http://192.168.xx.xx:8088/ws/v1/cluster/info"); - logger.info(result); - - - JSONObject jsonObject = JSON.parseObject(result); - String string = jsonObject.getJSONObject("clusterInfo").getString("haState"); - logger.info(string); - } -} diff --git a/escheduler-common/src/test/java/cn/escheduler/common/utils/IpUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/IpUtilsTest.java deleted file mode 100644 index 11a03a2334..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/utils/IpUtilsTest.java +++ /dev/null @@ -1,41 +0,0 @@ -package cn.escheduler.common.utils; - -import org.junit.Assert; -import org.junit.Test; - -import static org.junit.Assert.*; - -public class IpUtilsTest { - - @Test - public void ipToLong() { - - String ip = "192.168.110.1"; - String ip2 = "0.0.0.0"; - long longNumber = IpUtils.ipToLong(ip); - long longNumber2 = IpUtils.ipToLong(ip2); - System.out.println(longNumber); - Assert.assertEquals(longNumber, 3232263681L); - Assert.assertEquals(longNumber2, 0L); - - String ip3 = "255.255.255.255"; - long longNumber3 = IpUtils.ipToLong(ip3); - System.out.println(longNumber3); - Assert.assertEquals(longNumber3, 4294967295L); - - } - - @Test - public void longToIp() { - - String ip = "192.168.110.1"; - String ip2 = "0.0.0.0"; - long longNum = 3232263681L; - String i1 = IpUtils.longToIp(longNum); - - String i2 = IpUtils.longToIp(0); - - Assert.assertEquals(ip, i1); - Assert.assertEquals(ip2, i2); - } -} \ No newline at end of file diff --git 
a/escheduler-common/src/test/java/cn/escheduler/common/utils/JSONUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/JSONUtilsTest.java deleted file mode 100644 index 78631ff702..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/utils/JSONUtilsTest.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.enums.DataType; -import cn.escheduler.common.enums.Direct; -import cn.escheduler.common.process.Property; -import com.alibaba.fastjson.JSONObject; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.JsonNodeFactory; -import org.junit.Assert; -import org.junit.Test; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -public class JSONUtilsTest { - - - @Test - public void toMap() { - - String jsonStr = "{\"id\":\"1001\",\"name\":\"Jobs\"}"; - - Map models = JSONUtils.toMap(jsonStr); - Assert.assertEquals(models.get("id"), "1001"); - Assert.assertEquals(models.get("name"), "Jobs"); - - } - - @Test - public void convert2Property(){ - Property property = new Property(); - property.setProp("ds"); - property.setDirect(Direct.IN); - property.setType(DataType.VARCHAR); - property.setValue("sssssss"); - String str = "{\"direct\":\"IN\",\"prop\":\"ds\",\"type\":\"VARCHAR\",\"value\":\"sssssss\"}"; - Property property1 = JSONObject.parseObject(str, Property.class); - Direct direct = property1.getDirect(); - Assert.assertEquals(direct , Direct.IN); - } - - - @Test - public void String2MapTest(){ - String str = list2String(); - - List maps = JSONUtils.toList(str, - LinkedHashMap.class); - - Assert.assertEquals(maps.size(), 1); - Assert.assertEquals(maps.get(0).get("mysql服务名称"), "mysql200"); - Assert.assertEquals(maps.get(0).get("mysql地址"), "192.168.xx.xx"); - Assert.assertEquals(maps.get(0).get("端口"), "3306"); - Assert.assertEquals(maps.get(0).get("期间内没有使用索引的查询数握"), "80"); - Assert.assertEquals(maps.get(0).get("数据库客户端连接数"), "190"); - } - - public String list2String(){ - - LinkedHashMap map1 = new LinkedHashMap<>(); - map1.put("mysql服务名称","mysql200"); - map1.put("mysql地址","192.168.xx.xx"); - map1.put("端口","3306"); - map1.put("期间内没有使用索引的查询数握","80"); - 
map1.put("数据库客户端连接数","190"); - - List> maps = new ArrayList<>(); - maps.add(0,map1); - String resultJson = JSONUtils.toJson(maps); - return resultJson; - } - - @Test - public void testToJson() { - Map map = new HashMap<>(); - map.put("foo","bar"); - - Assert.assertEquals("{\"foo\":\"bar\"}", JSONUtils.toJson(map)); - Assert.assertEquals( - String.valueOf((Object) null), JSONUtils.toJson(null)); - } - - @Test - public void testParseObject() { - Assert.assertEquals("{\"foo\":\"bar\"}", JSONUtils.parseObject( - "{\n" + "\"foo\": \"bar\",\n" + "}", String.class)); - - Assert.assertNull(JSONUtils.parseObject("", null)); - Assert.assertNull(JSONUtils.parseObject("foo", String.class)); - } - - @Test - public void testToList() { - Assert.assertEquals(new ArrayList(), - JSONUtils.toList("A1B2C3", null)); - Assert.assertEquals(new ArrayList(), - JSONUtils.toList("", null)); - } - - @Test - public void testCheckJsonVaild() { - Assert.assertTrue(JSONUtils.checkJsonVaild("3")); - Assert.assertFalse(JSONUtils.checkJsonVaild("")); - } - - @Test - public void testFindValue() { - Assert.assertNull(JSONUtils.findValue( - new ArrayNode(new JsonNodeFactory(true)), null)); - } - - @Test - public void testToMap() { - Map map = new HashMap<>(); - map.put("foo","bar"); - - Assert.assertTrue(map.equals(JSONUtils.toMap( - "{\n" + "\"foo\": \"bar\",\n" + "}"))); - - Assert.assertFalse(map.equals(JSONUtils.toMap( - "{\n" + "\"bar\": \"foo\",\n" + "}"))); - - Assert.assertNull(JSONUtils.toMap("3")); - Assert.assertNull(JSONUtils.toMap(null)); - Assert.assertNull(JSONUtils.toMap("3", null, null)); - Assert.assertNull(JSONUtils.toMap(null, null, null)); - } - - @Test - public void testToJsonString() { - Map map = new HashMap<>(); - map.put("foo", "bar"); - - Assert.assertEquals("{\"foo\":\"bar\"}", - JSONUtils.toJsonString(map)); - Assert.assertEquals(String.valueOf((Object) null), - JSONUtils.toJsonString(null)); - } -} diff --git 
a/escheduler-common/src/test/java/cn/escheduler/common/utils/PropertyUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/PropertyUtilsTest.java deleted file mode 100644 index f1cb6ca4f8..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/utils/PropertyUtilsTest.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import cn.escheduler.common.Constants; -import org.junit.Test; - -import static org.junit.Assert.assertNotNull; - -public class PropertyUtilsTest { - - @Test - public void getString() { - assertNotNull(PropertyUtils.getString(Constants.FS_DEFAULTFS)); - assertNotNull(PropertyUtils.getInt("spring.redis.port")); - } -} \ No newline at end of file diff --git a/escheduler-common/src/test/java/cn/escheduler/common/utils/StringTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/StringTest.java deleted file mode 100644 index 4b753d4264..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/utils/StringTest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.ArrayList; -import java.util.List; - -public class StringTest { - - - @Test - public void test1(){ - System.out.println(String.format("%s_%010d_%010d", String.valueOf(1), Long.valueOf(3), Integer.valueOf(4))); - } - - @Test - public void stringCompareTest(){ - - for(int j = 0; j < 5; j++) { - long start = System.currentTimeMillis(); - int size = 10000; - - List taskList = new ArrayList<>(size); - - //init - for (int i = 0; i < size; i++) { - taskList.add(String.format("%d_%010d_%010d", 1, i, i + 1)); - } - - String origin = taskList.get(0); - for (int i = 1; i < taskList.size(); i++) { - String str = taskList.get(i); - int result = str.compareTo(origin); - if (result < 0) { - origin = str; - } - } - double during = (System.currentTimeMillis() - start) / 1000.0; - System.out.println(during); - Assert.assertEquals("1_0000000000_0000000001", origin); - } - } -} diff --git a/escheduler-common/src/test/java/cn/escheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java b/escheduler-common/src/test/java/cn/escheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java deleted file mode 100644 index 66e32ecd5e..0000000000 --- 
a/escheduler-common/src/test/java/cn/escheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.utils.placeholder; - -import cn.escheduler.common.utils.DateUtils; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.util.Date; - -import static cn.escheduler.common.utils.placeholder.TimePlaceholderUtils.*; - -public class TimePlaceholderUtilsTest { - - Date date = null; - - @Before - public void init(){ - date = DateUtils.parse("20170101010101","yyyyMMddHHmmss"); - } - - @Test - public void replacePlaceholdersT() { - Assert.assertEquals("2017test12017:***2016-12-31,20170102,20170130,20161227,20161231", replacePlaceholders("$[yyyy]test1$[yyyy:***]$[yyyy-MM-dd-1],$[month_begin(yyyyMMdd, 1)],$[month_end(yyyyMMdd, -1)],$[week_begin(yyyyMMdd, 1)],$[week_end(yyyyMMdd, -1)]", - date, true)); - - Assert.assertEquals("1483200061,1483290061,1485709261,1482771661,1483113600,1483203661", replacePlaceholders("$[timestamp(yyyyMMdd00mmss)]," - + "$[timestamp(month_begin(yyyyMMddHHmmss, 1))]," - + "$[timestamp(month_end(yyyyMMddHHmmss, -1))]," - + 
"$[timestamp(week_begin(yyyyMMddHHmmss, 1))]," - + "$[timestamp(week_end(yyyyMMdd000000, -1))]," - + "$[timestamp(yyyyMMddHHmmss)]", - date, true)); - } - - - - @Test - public void calcMinutesT() { - Assert.assertEquals("Sun Jan 01 01:01:01 CST 2017=yyyy", calcMinutes("yyyy", date).toString()); - Assert.assertEquals("Sun Jan 08 01:01:01 CST 2017=yyyyMMdd", calcMinutes("yyyyMMdd+7*1", date).toString()); - Assert.assertEquals("Sun Dec 25 01:01:01 CST 2016=yyyyMMdd", calcMinutes("yyyyMMdd-7*1", date).toString()); - Assert.assertEquals("Mon Jan 02 01:01:01 CST 2017=yyyyMMdd", calcMinutes("yyyyMMdd+1", date).toString()); - Assert.assertEquals("Sat Dec 31 01:01:01 CST 2016=yyyyMMdd", calcMinutes("yyyyMMdd-1", date).toString()); - Assert.assertEquals("Sun Jan 01 02:01:01 CST 2017=yyyyMMddHH", calcMinutes("yyyyMMddHH+1/24", date).toString()); - Assert.assertEquals("Sun Jan 01 00:01:01 CST 2017=yyyyMMddHH", calcMinutes("yyyyMMddHH-1/24", date).toString()); - } - - @Test - public void calcMonthsT() { - Assert.assertEquals("Mon Jan 01 01:01:01 CST 2018=yyyyMMdd", calcMonths("add_months(yyyyMMdd,12*1)", date).toString()); - Assert.assertEquals("Fri Jan 01 01:01:01 CST 2016=yyyyMMdd", calcMonths("add_months(yyyyMMdd,-12*1)", date).toString()); - } - -} \ No newline at end of file diff --git a/escheduler-common/src/test/java/cn/escheduler/common/zk/StandaloneZKServerForTest.java b/escheduler-common/src/test/java/cn/escheduler/common/zk/StandaloneZKServerForTest.java deleted file mode 100644 index b9562ec0d0..0000000000 --- a/escheduler-common/src/test/java/cn/escheduler/common/zk/StandaloneZKServerForTest.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.common.zk; - -import java.io.File; -import java.util.Properties; - -import cn.escheduler.common.thread.ThreadPoolExecutors; -import org.apache.zookeeper.server.ServerConfig; -import org.apache.zookeeper.server.ZooKeeperServerMain; -import org.apache.zookeeper.server.quorum.QuorumPeerConfig; -import org.junit.Before; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - -/** - * just for test - */ -public class StandaloneZKServerForTest { - - private static final Logger logger = LoggerFactory.getLogger(StandaloneZKServerForTest.class); - - private static volatile ZooKeeperServerMain zkServer = null; - - - @Before - public void before() { - logger.info("standalone zookeeper server for test service start "); - - ThreadPoolExecutors.getInstance().execute(new Runnable() { - @Override - public void run() { - - //delete zk data dir ? 
- File zkFile = new File(System.getProperty("java.io.tmpdir"), "zookeeper"); -// if(zkFile.exists()){ -// zkFile.delete(); -// } - startStandaloneServer("2000", zkFile.getAbsolutePath(), "2181", "10", "5"); - } - }); - - } - - - /** - * start zk server - * @param tickTime zookeeper ticktime - * @param dataDir zookeeper data dir - * @param clientPort zookeeper client port - * @param initLimit zookeeper init limit - * @param syncLimit zookeeper sync limit - */ - private void startStandaloneServer(String tickTime, String dataDir, String clientPort, String initLimit, String syncLimit) { - Properties props = new Properties(); - props.setProperty("tickTime", tickTime); - props.setProperty("dataDir", dataDir); - props.setProperty("clientPort", clientPort); - props.setProperty("initLimit", initLimit); - props.setProperty("syncLimit", syncLimit); - - QuorumPeerConfig quorumConfig = new QuorumPeerConfig(); - try { - quorumConfig.parseProperties(props); - - if(zkServer == null ){ - - synchronized (StandaloneZKServerForTest.class){ - if(zkServer == null ){ - zkServer = new ZooKeeperServerMain(); - final ServerConfig config = new ServerConfig(); - config.readFrom(quorumConfig); - zkServer.runFromConfig(config); - } - } - - } - - } catch (Exception e) { - logger.error("start standalone server fail!", e); - } - } - - -} \ No newline at end of file diff --git a/escheduler-dao/pom.xml b/escheduler-dao/pom.xml deleted file mode 100644 index 710f862263..0000000000 --- a/escheduler-dao/pom.xml +++ /dev/null @@ -1,192 +0,0 @@ - - - 4.0.0 - - cn.analysys - escheduler - 1.1.0-SNAPSHOT - - escheduler-dao - escheduler-dao - http://maven.apache.org - - UTF-8 - - - - junit - junit - test - - - com.baomidou - mybatis-plus - ${mybatis-plus.version} - - - com.baomidou - mybatis-plus-boot-starter - ${mybatis-plus.version} - - - - - - - - org.postgresql - postgresql - - - org.projectlombok - lombok - ${lombok.version} - - - org.springframework.boot - spring-boot-starter-test - test - - - 
org.ow2.asm - asm - - - org.springframework.boot - spring-boot - - - org.springframework.boot - spring-boot-autoconfigure - - - - - - mysql - mysql-connector-java - - - - com.alibaba - druid - - - - ch.qos.logback - logback-classic - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-databind - - - org.apache.httpcomponents - httpclient - - - commons-httpclient - commons-httpclient - - - org.apache.commons - commons-lang3 - - - commons-lang - commons-lang - - - - com.alibaba - fastjson - compile - - - - com.cronutils - cron-utils - - - - org.quartz-scheduler - quartz - - - c3p0 - c3p0 - - - - - - org.quartz-scheduler - quartz-jobs - - - commons-configuration - commons-configuration - - - cn.analysys - escheduler-common - - - protobuf-java - com.google.protobuf - - - - - org.springframework - spring-test - test - - - io.swagger - swagger-annotations - 1.5.20 - compile - - - org.yaml - snakeyaml - - - - - - - - src/main/java - - **/*.xml - - false - - - src/main/resources - - **/*.xml - **/*.yml - - false - - - - - org.apache.maven.plugins - maven-compiler-plugin - - ${java.version} - ${java.version} - ${project.build.sourceEncoding} - - - - - diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/AbstractBaseDao.java b/escheduler-dao/src/main/java/cn/escheduler/dao/AbstractBaseDao.java deleted file mode 100644 index ce18b15528..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/AbstractBaseDao.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao; - -/** - * base dao - */ -public abstract class AbstractBaseDao { - - protected abstract void init(); - - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/AlertDao.java b/escheduler-dao/src/main/java/cn/escheduler/dao/AlertDao.java deleted file mode 100644 index 237872fc08..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/AlertDao.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao; - -import cn.escheduler.common.enums.AlertStatus; -import cn.escheduler.common.enums.AlertType; -import cn.escheduler.common.enums.ShowType; -import cn.escheduler.dao.entity.Alert; -import cn.escheduler.dao.mapper.AlertMapper; -import cn.escheduler.dao.mapper.UserAlertGroupMapper; -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.User; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import java.util.Date; -import java.util.List; - -@Component -public class AlertDao extends AbstractBaseDao { - - private final Logger logger = LoggerFactory.getLogger(getClass()); - - @Autowired - private AlertMapper alertMapper; - - @Autowired - private UserAlertGroupMapper userAlertGroupMapper; - - @Override - protected void init() { -// alertMapper = ConnectionFactory.getSqlSession().getMapper(AlertMapper.class); -// userAlertGroupMapper = ConnectionFactory.getSqlSession().getMapper(UserAlertGroupMapper.class); - } - - /** - * insert alert - * @param alert - * @return - */ - public int addAlert(Alert alert){ - return alertMapper.insert(alert); - } - - /** - * update alert - * @param alertStatus - * @param log - * @param id - * @return - */ - public int updateAlert(AlertStatus alertStatus,String log,int id){ - Alert alert = alertMapper.selectById(id); - alert.setAlertStatus(alertStatus); - alert.setUpdateTime(new Date()); - alert.setLog(log); - return alertMapper.updateById(alert); - } - - /** - * query user list by alert group id - * @param alerGroupId - * @return - */ - public List queryUserByAlertGroupId(int alerGroupId){ - - return userAlertGroupMapper.queryForUser(alerGroupId); - } - /** - * MasterServer or WorkerServer stoped - */ - public void sendServerStopedAlert(int alertgroupId,String 
host,String serverType){ - Alert alert = new Alert(); - String content = String.format("[{'type':'%s','host':'%s','event':'server down','warning level':'serious'}]", - serverType, host); - alert.setTitle("Fault tolerance warning"); - alert.setShowType(ShowType.TABLE); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(alertgroupId); - alert.setCreateTime(new Date()); - alert.setUpdateTime(new Date()); - alertMapper.insert(alert); - } - - /** - * process time out alert - * @param processInstance - * @param processDefinition - */ - public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProcessDefinition processDefinition){ - int alertgroupId = processInstance.getWarningGroupId(); - String receivers = processDefinition.getReceivers(); - String receiversCc = processDefinition.getReceiversCc(); - Alert alert = new Alert(); - String content = String.format("[{'id':'%d','name':'%s','event':'timeout','warnLevel':'middle'}]", - processInstance.getId(), processInstance.getName()); - alert.setTitle("Process Timeout Warn"); - alert.setShowType(ShowType.TABLE); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(alertgroupId); - if (StringUtils.isNotEmpty(receivers)) { - alert.setReceivers(receivers); - } - if (StringUtils.isNotEmpty(receiversCc)) { - alert.setReceiversCc(receiversCc); - } - alert.setCreateTime(new Date()); - alert.setUpdateTime(new Date()); - alertMapper.insert(alert); - } - - /** - * task timeout warn - */ - public void sendTaskTimeoutAlert(int alertgroupId,String receivers,String receiversCc,int taskId,String taskName){ - Alert alert = new Alert(); - String content = String.format("[{'id':'%d','name':'%s','event':'timeout','warnLevel':'middle'}]",taskId,taskName); - alert.setTitle("Task Timeout Warn"); - alert.setShowType(ShowType.TABLE); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(alertgroupId); - if 
(StringUtils.isNotEmpty(receivers)) { - alert.setReceivers(receivers); - } - if (StringUtils.isNotEmpty(receiversCc)) { - alert.setReceiversCc(receiversCc); - } - alert.setCreateTime(new Date()); - alert.setUpdateTime(new Date()); - alertMapper.insert(alert); - } - - /** - * list the alert information of waiting to be executed - * @return - */ - public List listWaitExecutionAlert(){ - return alertMapper.listAlertByStatus(AlertStatus.WAIT_EXECUTION); - } - - /** - * list user information by alert group id - * @param alergroupId - * @return - */ - public List listUserByAlertgroupId(int alergroupId){ - return userAlertGroupMapper.listUserByAlertgroupId(alergroupId); - } - - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/App.java b/escheduler-dao/src/main/java/cn/escheduler/dao/App.java deleted file mode 100644 index 9534041ae6..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/App.java +++ /dev/null @@ -1,11 +0,0 @@ -package cn.escheduler.dao; - -import org.springframework.boot.SpringApplication; -import org.springframework.boot.autoconfigure.SpringBootApplication; - -@SpringBootApplication -public class App { - public static void main(String[] args){ - SpringApplication.run(App.class); - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/DaoFactory.java b/escheduler-dao/src/main/java/cn/escheduler/dao/DaoFactory.java deleted file mode 100644 index 8237a85805..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/DaoFactory.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao; - -import cn.escheduler.dao.utils.BeanContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -/** - * dao factory - */ -public class DaoFactory { - - private static final Logger logger = LoggerFactory.getLogger(DaoFactory.class); - - private static Map daoMap = new ConcurrentHashMap<>(); - - private DaoFactory(){ - - } - - /** - * 获取 Dao 实例 - * - * @param clazz - * @return Dao实例 - */ - @SuppressWarnings("unchecked") - public static T getDaoInstance(Class clazz) { - String className = clazz.getName(); - synchronized (daoMap) { - if (!daoMap.containsKey(className)) { - try { -// T t = BeanContext.getBean(clazz); - T t = clazz.getConstructor().newInstance(); - // 实例初始化 - t.init(); - daoMap.put(className, t); - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - } - } - - return (T) daoMap.get(className); - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/MonitorDBDao.java b/escheduler-dao/src/main/java/cn/escheduler/dao/MonitorDBDao.java deleted file mode 100644 index 1aafac7266..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/MonitorDBDao.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao; - -import cn.escheduler.common.Constants; -import cn.escheduler.dao.entity.MonitorRecord; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.configuration.ConfigurationException; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.*; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - - -/** - * database state dao - */ -public class MonitorDBDao { - - private static Logger logger = LoggerFactory.getLogger(MonitorDBDao.class); - public static final String VARIABLE_NAME = "variable_name"; - - /** - * 加载配置文件 - */ - private static Configuration conf; - - static { - try { - conf = new PropertiesConfiguration(Constants.DATA_SOURCE_PROPERTIES); - }catch (ConfigurationException e){ - logger.error("load configuration excetpion",e); - System.exit(1); - } - } - - /** - * create connection - * @return - */ - private static Connection getConn() { - String url = conf.getString(Constants.SPRING_DATASOURCE_URL); - String username = conf.getString(Constants.SPRING_DATASOURCE_USERNAME); - String password = conf.getString(Constants.SPRING_DATASOURCE_PASSWORD); - Connection conn = null; - try { - //classloader,load driver - Class.forName(Constants.JDBC_MYSQL_CLASS_NAME); - conn = DriverManager.getConnection(url, username, 
password); - } catch (ClassNotFoundException e) { - logger.error("ClassNotFoundException ", e); - } catch (SQLException e) { - logger.error("SQLException ", e); - } - return conn; - } - - - /** - * query database state - * @return - */ - public static List queryDatabaseState() { - List list = new ArrayList<>(1); - - Connection conn = null; - long maxConnections = 0; - long maxUsedConnections = 0; - long threadsConnections = 0; - long threadsRunningConnections = 0; - //mysql running state - int state = 1; - - - MonitorRecord monitorRecord = new MonitorRecord(); - try { - conn = getConn(); - if(conn == null){ - return list; - } - - Statement pstmt = conn.createStatement(); - - ResultSet rs1 = pstmt.executeQuery("show global variables"); - while(rs1.next()){ - if(rs1.getString(VARIABLE_NAME).toUpperCase().equals("MAX_CONNECTIONS")){ - maxConnections= Long.parseLong(rs1.getString("value")); - } - } - - ResultSet rs2 = pstmt.executeQuery("show global status"); - while(rs2.next()){ - if(rs2.getString(VARIABLE_NAME).toUpperCase().equals("MAX_USED_CONNECTIONS")){ - maxUsedConnections = Long.parseLong(rs2.getString("value")); - }else if(rs2.getString(VARIABLE_NAME).toUpperCase().equals("THREADS_CONNECTED")){ - threadsConnections = Long.parseLong(rs2.getString("value")); - }else if(rs2.getString(VARIABLE_NAME).toUpperCase().equals("THREADS_RUNNING")){ - threadsRunningConnections= Long.parseLong(rs2.getString("value")); - } - } - - - } catch (SQLException e) { - logger.error("SQLException ", e); - state = 0; - }finally { - try { - if(conn != null){ - conn.close(); - } - } catch (SQLException e) { - logger.error("SQLException ", e); - } - } - - monitorRecord.setDate(new Date()); - monitorRecord.setMaxConnections(maxConnections); - monitorRecord.setMaxUsedConnections(maxUsedConnections); - monitorRecord.setThreadsConnections(threadsConnections); - monitorRecord.setThreadsRunningConnections(threadsRunningConnections); - monitorRecord.setState(state); - - list.add(monitorRecord); 
- - return list; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java b/escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java deleted file mode 100644 index 1f705f64a6..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java +++ /dev/null @@ -1,1747 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.*; -import cn.escheduler.common.model.DateInterval; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.queue.ITaskQueue; -import cn.escheduler.common.queue.TaskQueueFactory; -import cn.escheduler.common.task.subprocess.SubProcessParameters; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.common.utils.IpUtils; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.*; -import cn.escheduler.dao.mapper.*; -import cn.escheduler.dao.utils.cron.CronUtils; -import com.alibaba.fastjson.JSONObject; -import com.cronutils.model.Cron; -import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.lang3.StringUtils; -import org.quartz.CronExpression; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; -import org.springframework.transaction.annotation.Transactional; - -import java.util.*; -import java.util.stream.Collectors; - -import static cn.escheduler.common.Constants.*; -import static cn.escheduler.dao.datasource.ConnectionFactory.getMapper; - -/** - * process relative dao that some mappers in this. 
- */ -@Component -public class ProcessDao extends AbstractBaseDao { - - private final Logger logger = LoggerFactory.getLogger(getClass()); - - private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), - ExecutionStatus.RUNNING_EXEUTION.ordinal(), - ExecutionStatus.READY_PAUSE.ordinal(), -// ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(), - ExecutionStatus.READY_STOP.ordinal()}; - - @Autowired - private UserMapper userMapper; - - @Autowired - private ProcessDefinitionMapper processDefineMapper; - - @Autowired - private ProcessInstanceMapper processInstanceMapper; - - @Autowired - private DataSourceMapper dataSourceMapper; - - @Autowired - private ProcessInstanceMapMapper processInstanceMapMapper; - - @Autowired - private TaskInstanceMapper taskInstanceMapper; - - @Autowired - private CommandMapper commandMapper; - - @Autowired - private ScheduleMapper scheduleMapper; - - @Autowired - private UdfFuncMapper udfFuncMapper; - - @Autowired - private ResourceMapper resourceMapper; - - @Autowired - private WorkerGroupMapper workerGroupMapper; - - @Autowired - private ErrorCommandMapper errorCommandMapper; - - @Autowired - private TenantMapper tenantMapper; - - @Autowired - private ProjectMapper projectMapper; - - /** - * task queue impl - */ - protected ITaskQueue taskQueue; - - public ProcessDao(){ -// init(); - } - - /** - * initialize - */ - @Override - protected void init() { - userMapper = getMapper(UserMapper.class); - processDefineMapper = getMapper(ProcessDefinitionMapper.class); - processInstanceMapper = getMapper(ProcessInstanceMapper.class); - dataSourceMapper = getMapper(DataSourceMapper.class); - processInstanceMapMapper = getMapper(ProcessInstanceMapMapper.class); - taskInstanceMapper = getMapper(TaskInstanceMapper.class); - commandMapper = getMapper(CommandMapper.class); - scheduleMapper = getMapper(ScheduleMapper.class); - udfFuncMapper = getMapper(UdfFuncMapper.class); - resourceMapper = getMapper(ResourceMapper.class); - 
workerGroupMapper = getMapper(WorkerGroupMapper.class); - taskQueue = TaskQueueFactory.getTaskQueueInstance(); - tenantMapper = getMapper(TenantMapper.class); - } - - - /** - * find one command from command queue, construct process instance - * @param logger - * @param host - * @param validThreadNum - * @return - */ - @Transactional(value = "TransactionManager",rollbackFor = Exception.class) - public ProcessInstance scanCommand(Logger logger, String host, int validThreadNum){ - - ProcessInstance processInstance = null; - Command command = findOneCommand(); - if (command == null) { - return null; - } - logger.info(String.format("find one command: id: %d, type: %s", command.getId(),command.getCommandType().toString())); - - try{ - processInstance = constructProcessInstance(command, host); - //cannot construct process instance, return null; - if(processInstance == null){ - logger.error("scan command, command parameter is error: %s", command.toString()); - delCommandByid(command.getId()); - saveErrorCommand(command, "process instance is null"); - return null; - }else if(!checkThreadNum(command, validThreadNum)){ - logger.info("there is not enough thread for this command: {}",command.toString() ); - return setWaitingThreadProcess(command, processInstance); - }else{ - processInstance.setCommandType(command.getCommandType()); - processInstance.addHistoryCmd(command.getCommandType()); - saveProcessInstance(processInstance); - this.setSubProcessParam(processInstance); - delCommandByid(command.getId()); - return processInstance; - } - }catch (Exception e){ - logger.error("scan command error ", e); - saveErrorCommand(command, e.toString()); - delCommandByid(command.getId()); - } - return null; - } - - private void saveErrorCommand(Command command, String message) { - - ErrorCommand errorCommand = new ErrorCommand(command, message); - this.errorCommandMapper.insert(errorCommand); - } - - /** - * set process waiting thread - * @param command - * @param processInstance - * 
@return - */ - private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) { - processInstance.setState(ExecutionStatus.WAITTING_THREAD); - if(command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD){ - processInstance.addHistoryCmd(command.getCommandType()); - } - saveProcessInstance(processInstance); - this.setSubProcessParam(processInstance); - createRecoveryWaitingThreadCommand(command, processInstance); - return null; - } - - private boolean checkThreadNum(Command command, int validThreadNum) { - int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId()); - return validThreadNum >= commandThreadCount; - } - - /** - * insert one command - */ - public int createCommand(Command command) { - int result = 0; - if (command != null){ - result = commandMapper.insert(command); - } - return result; - } - - /** - * - * find one command from queue list - * @return - */ - public Command findOneCommand(){ - return commandMapper.getOneToRun(); - } - - /** - * check the input command exists in queue list - * @param command - * @return - */ - public Boolean verifyIsNeedCreateCommand(Command command){ - Boolean isNeedCreate = true; - Map cmdTypeMap = new HashMap(); - cmdTypeMap.put(CommandType.REPEAT_RUNNING,1); - cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS,1); - cmdTypeMap.put(CommandType.START_FAILURE_TASK_PROCESS,1); - CommandType commandType = command.getCommandType(); - - if(cmdTypeMap.containsKey(commandType)){ - JSONObject cmdParamObj = (JSONObject) JSONObject.parse(command.getCommandParam()); - JSONObject tempObj; - int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING); - - List commands = commandMapper.getAll(null); - //遍历所有命令 - for (Command tmpCommand:commands){ - if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){ - tempObj = (JSONObject) JSONObject.parse(tmpCommand.getCommandParam()); - if(tempObj != null && processInstanceId == 
tempObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING)){ - isNeedCreate = false; - break; - } - } - } - } - return isNeedCreate; - } - - /** - * find process instance detail by id - * @param processId - * @return - */ - public ProcessInstance findProcessInstanceDetailById(int processId){ - return processInstanceMapper.queryDetailById(processId); - } - - /** - * find process instance by id - * @param processId - * @return - */ - public ProcessInstance findProcessInstanceById(int processId){ - - return processInstanceMapper.selectById(processId); - } - - /** - * find process define by id. - * @param processDefinitionId - * @return - */ - public ProcessDefinition findProcessDefineById(int processDefinitionId) { - return processDefineMapper.selectById(processDefinitionId); - } - - /** - * delete work process instance by id - * @param processInstanceId - * @return - */ - public int deleteWorkProcessInstanceById(int processInstanceId){ - return processInstanceMapper.deleteById(processInstanceId); - } - - /** - * - * delete all sub process by parent instance id - * @return - */ - public int deleteAllSubWorkProcessByParentId(int processInstanceId){ - - List subProcessIdList = processInstanceMapMapper.querySubIdListByParentId(processInstanceId); - - for(Integer subId : subProcessIdList ){ - deleteAllSubWorkProcessByParentId(subId); - deleteWorkProcessMapByParentId(subId); - deleteWorkProcessInstanceById(subId); - } - return 1; - } - - /** - * create process define - * @param processDefinition - * @return - */ - public int createProcessDefine(ProcessDefinition processDefinition){ - int count = 0; - if(processDefinition != null){ - count = this.processDefineMapper.insert(processDefinition); - } - return count; - } - - - /** - * calculate sub process number in the process define. 
- * @param processDefinitionId - * @return - */ - private Integer workProcessThreadNumCount(Integer processDefinitionId){ - List ids = new ArrayList<>(); - recurseFindSubProcessId(processDefinitionId, ids); - return ids.size()+1; - } - - /** - * recursive query sub process definition id by parent id. - * @param parentId - * @param ids - */ - public void recurseFindSubProcessId(int parentId, List ids){ - ProcessDefinition processDefinition = processDefineMapper.selectById(parentId); - String processDefinitionJson = processDefinition.getProcessDefinitionJson(); - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - - List taskNodeList = processData.getTasks(); - - if (taskNodeList != null && taskNodeList.size() > 0){ - - for (TaskNode taskNode : taskNodeList){ - String parameter = taskNode.getParams(); - if (parameter.contains(CMDPARAM_SUB_PROCESS_DEFINE_ID)){ - SubProcessParameters subProcessParam = JSONObject.parseObject(parameter, SubProcessParameters.class); - ids.add(subProcessParam.getProcessDefinitionId()); - recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids); - } - } - } - } - - /** - * create recovery waiting thread command when thread pool is not enough for the process instance. - * sub work process instance need not to create recovery command. - * create recovery waiting thread command and delete origin command at the same time. 
- * if the recovery command is exists, only update the field update_time - * @param originCommand - * @param processInstance - */ - public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) { - - // sub process doesnot need to create wait command - if(processInstance.getIsSubProcess() == Flag.YES){ - if(originCommand != null){ - commandMapper.deleteById(originCommand.getId()); - } - return; - } - Map cmdParam = new HashMap<>(); - cmdParam.put(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD, String.valueOf(processInstance.getId())); - // process instance quit by "waiting thread" state - if(originCommand == null){ - Command command = new Command( - CommandType.RECOVER_WAITTING_THREAD, - processInstance.getTaskDependType(), - processInstance.getFailureStrategy(), - processInstance.getExecutorId(), - processInstance.getProcessDefinitionId(), - JSONUtils.toJson(cmdParam), - processInstance.getWarningType(), - processInstance.getWarningGroupId(), - processInstance.getScheduleTime(), - processInstance.getProcessInstancePriority() - ); - saveCommand(command); - return ; - } - - // update the command time if current command if recover from waiting - if(originCommand.getCommandType() == CommandType.RECOVER_WAITTING_THREAD){ - originCommand.setUpdateTime(new Date()); - saveCommand(originCommand); - }else{ - // delete old command and create new waiting thread command - commandMapper.deleteById(originCommand.getId()); - originCommand.setId(0); - originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD); - originCommand.setUpdateTime(new Date()); - originCommand.setCommandParam(JSONUtils.toJson(cmdParam)); - originCommand.setProcessInstancePriority(processInstance.getProcessInstancePriority()); - saveCommand(originCommand); - } - } - - /** - * get schedule time from command - * @param command - * @param cmdParam - * @return - */ - private Date getScheduleTime(Command command, Map cmdParam){ - Date scheduleTime = 
command.getScheduleTime(); - if(scheduleTime == null){ - if(cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)){ - scheduleTime = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); - } - } - return scheduleTime; - } - - /** - * generate a new work process instance from command. - * @param processDefinition - * @param command - * @param cmdParam - * @return - */ - private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition, - Command command, - Map cmdParam){ - ProcessInstance processInstance = new ProcessInstance(processDefinition); - processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); - processInstance.setRecovery(Flag.NO); - processInstance.setStartTime(new Date()); - processInstance.setRunTimes(1); - processInstance.setMaxTryTimes(0); - processInstance.setProcessDefinitionId(command.getProcessDefinitionId()); - processInstance.setCommandParam(command.getCommandParam()); - processInstance.setCommandType(command.getCommandType()); - processInstance.setIsSubProcess(Flag.NO); - processInstance.setTaskDependType(command.getTaskDependType()); - processInstance.setFailureStrategy(command.getFailureStrategy()); - processInstance.setExecutorId(command.getExecutorId()); - WarningType warningType = command.getWarningType() == null ? WarningType.NONE : command.getWarningType(); - processInstance.setWarningType(warningType); - Integer warningGroupId = command.getWarningGroupId() == null ? 
0 : command.getWarningGroupId(); - processInstance.setWarningGroupId(warningGroupId); - - // schedule time - Date scheduleTime = getScheduleTime(command, cmdParam); - if(scheduleTime != null){ - processInstance.setScheduleTime(scheduleTime); - } - processInstance.setCommandStartTime(command.getStartTime()); - processInstance.setLocations(processDefinition.getLocations()); - processInstance.setConnects(processDefinition.getConnects()); - // curing global params - processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( - processDefinition.getGlobalParamMap(), - processDefinition.getGlobalParamList(), - getCommandTypeIfComplement(processInstance, command), - processInstance.getScheduleTime())); - - //copy process define json to process instance - processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson()); - // set process instance priority - processInstance.setProcessInstancePriority(command.getProcessInstancePriority()); - int workerGroupId = command.getWorkerGroupId() == 0 ? -1 : command.getWorkerGroupId(); - processInstance.setWorkerGroupId(workerGroupId); - processInstance.setTimeout(processDefinition.getTimeout()); - processInstance.setTenantId(processDefinition.getTenantId()); - return processInstance; - } - - /** - * get process tenant - * there is tenant id in definition, use the tenant of the definition. - * if there is not tenant id in the definiton or the tenant not exist - * use definition creator's tenant. 
- * @param tenantId - * @param userId - * @return - */ - public Tenant getTenantForProcess(int tenantId, int userId){ - Tenant tenant = null; - if(tenantId >= 0){ - tenant = tenantMapper.queryById(tenantId); - } - if(tenant == null){ - User user = userMapper.selectById(userId); - tenant = tenantMapper.queryById(user.getTenantId()); - } - return tenant; - } - - /** - * check command parameters is valid - * @param command - * @param cmdParam - * @return - */ - private Boolean checkCmdParam(Command command, Map cmdParam){ - if(command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType()== TaskDependType.TASK_PRE){ - if(cmdParam == null - || !cmdParam.containsKey(Constants.CMDPARAM_START_NODE_NAMES) - || cmdParam.get(Constants.CMDPARAM_START_NODE_NAMES).isEmpty()){ - logger.error(String.format("command node depend type is %s, but start nodes is null ", command.getTaskDependType().toString())); - return false; - } - } - return true; - } - - /** - * construct process instance according to one command. - * @param command - * @param host - * @return - */ - private ProcessInstance constructProcessInstance(Command command, String host){ - - ProcessInstance processInstance = null; - CommandType commandType = command.getCommandType(); - Map cmdParam = JSONUtils.toMap(command.getCommandParam()); - - ProcessDefinition processDefinition = null; - if(command.getProcessDefinitionId() != 0){ - processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId()); - if(processDefinition == null){ - logger.error(String.format("cannot find the work process define! 
define id : %d", command.getProcessDefinitionId())); - return null; - } - } - - if(cmdParam != null ){ - Integer processInstanceId = 0; - // recover from failure or pause tasks - if(cmdParam.containsKey(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING)) { - String processId = cmdParam.get(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING); - processInstanceId = Integer.parseInt(processId); - if (processInstanceId == 0) { - logger.error("command parameter is error, [ ProcessInstanceId ] is 0"); - return null; - } - }else if(cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)){ - // sub process map - String pId = cmdParam.get(Constants.CMDPARAM_SUB_PROCESS); - processInstanceId = Integer.parseInt(pId); - }else if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD)){ - // waiting thread command - String pId = cmdParam.get(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD); - processInstanceId = Integer.parseInt(pId); - } - if(processInstanceId ==0){ - processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); - }else{ - processInstance = this.findProcessInstanceDetailById(processInstanceId); - } - processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId()); - processInstance.setProcessDefinition(processDefinition); - - //reset command parameter - if(processInstance.getCommandParam() != null){ - Map processCmdParam = JSONUtils.toMap(processInstance.getCommandParam()); - for(String key : processCmdParam.keySet()){ - if(!cmdParam.containsKey(key)){ - cmdParam.put(key,processCmdParam.get(key)); - } - } - } - // reset command parameter if sub process - if(cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)){ - processInstance.setCommandParam(command.getCommandParam()); - } - }else{ - // generate one new process instance - processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); - } - if(!checkCmdParam(command, cmdParam)){ - logger.error("command parameter check failed!"); - return null; - 
} - - if(command.getScheduleTime() != null){ - processInstance.setScheduleTime(command.getScheduleTime()); - } - processInstance.setHost(host); - - ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXEUTION; - int runTime = processInstance.getRunTimes(); - switch (commandType){ - case START_PROCESS: - break; - case START_FAILURE_TASK_PROCESS: - // find failed tasks and init these tasks - List failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE); - List toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE); - List killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL); - cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); - - failedList.addAll(killedList); - failedList.addAll(toleranceList); - for(Integer taskId : failedList){ - initTaskInstance(this.findTaskInstanceById(taskId)); - } - cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING, - String.join(Constants.COMMA, convertIntListToString(failedList))); - processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); - processInstance.setRunTimes(runTime +1 ); - break; - case START_CURRENT_TASK_PROCESS: - break; - case RECOVER_WAITTING_THREAD: - break; - case RECOVER_SUSPENDED_PROCESS: - // find pause tasks and init task's state - cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); - List suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE); - List stopNodeList = findTaskIdByInstanceState(processInstance.getId(), - ExecutionStatus.KILL); - suspendedNodeList.addAll(stopNodeList); - for(Integer taskId : suspendedNodeList){ - // 把暂停状态初始化 - initTaskInstance(this.findTaskInstanceById(taskId)); - } - cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList))); - processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); - 
processInstance.setRunTimes(runTime +1); - break; - case RECOVER_TOLERANCE_FAULT_PROCESS: - // recover tolerance fault process - processInstance.setRecovery(Flag.YES); - runStatus = processInstance.getState(); - break; - case COMPLEMENT_DATA: - // delete all the valid tasks when complement data - List taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId()); - for(TaskInstance taskInstance : taskInstanceList){ - taskInstance.setFlag(Flag.NO); - this.updateTaskInstance(taskInstance); - } - break; - case REPEAT_RUNNING: - // delete the recover task names from command parameter - if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)){ - cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); - processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); - } - // delete all the valid tasks when repeat running - List validTaskList = findValidTaskListByProcessId(processInstance.getId()); - for(TaskInstance taskInstance : validTaskList){ - taskInstance.setFlag(Flag.NO); - updateTaskInstance(taskInstance); - } - processInstance.setStartTime(new Date()); - processInstance.setEndTime(null); - processInstance.setRunTimes(runTime +1); - initComplementDataParam(processDefinition, processInstance, cmdParam); - break; - case SCHEDULER: - break; - default: - break; - } - processInstance.setState(runStatus); - return processInstance; - } - - /** - * return complement data if the process start with complement data - */ - private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command){ - if(CommandType.COMPLEMENT_DATA == processInstance.getCmdTypeIfComplement()){ - return CommandType.COMPLEMENT_DATA; - }else{ - return command.getCommandType(); - } - } - - /** - * initialize complement data parameters - * @param processDefinition - * @param processInstance - * @param cmdParam - */ - private void initComplementDataParam(ProcessDefinition processDefinition, ProcessInstance processInstance, Map cmdParam) { - 
if(!processInstance.isComplementData()){ - return; - } - - Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE), - YYYY_MM_DD_HH_MM_SS); - processInstance.setScheduleTime(startComplementTime); - processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( - processDefinition.getGlobalParamMap(), - processDefinition.getGlobalParamList(), - CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); - - } - - /** - * set sub work process parameters. - * handle sub work process instance, update relation table and command parameters - * set sub work process flag, extends parent work process command parameters. - */ - public ProcessInstance setSubProcessParam(ProcessInstance subProcessInstance){ - String cmdParam = subProcessInstance.getCommandParam(); - if(StringUtils.isEmpty(cmdParam)){ - return subProcessInstance; - } - Map paramMap = JSONUtils.toMap(cmdParam); - // write sub process id into cmd param. - if(paramMap.containsKey(CMDPARAM_SUB_PROCESS) - && CMDPARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMDPARAM_SUB_PROCESS))){ - paramMap.remove(CMDPARAM_SUB_PROCESS); - paramMap.put(CMDPARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId())); - subProcessInstance.setCommandParam(JSONUtils.toJson(paramMap)); - subProcessInstance.setIsSubProcess(Flag.YES); - this.saveProcessInstance(subProcessInstance); - } - // copy parent instance user def params to sub process.. 
- String parentInstanceId = paramMap.get(CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID); - if(StringUtils.isNotEmpty(parentInstanceId)){ - ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId)); - if(parentInstance != null){ - subProcessInstance.setGlobalParams( - joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams())); - this.saveProcessInstance(subProcessInstance); - }else{ - logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam); - } - } - ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class); - if(processInstanceMap == null || processInstanceMap.getParentProcessInstanceId() == 0){ - return subProcessInstance; - } - // update sub process id to process map table - processInstanceMap.setProcessInstanceId(subProcessInstance.getId()); - - this.updateWorkProcessInstanceMap(processInstanceMap); - return subProcessInstance; - } - - /** - * join parent global params into sub process. - * only the keys doesn't in sub process global would be joined. 
- * @param parentGlobalParams - * @param subGlobalParams - * @return - */ - private String joinGlobalParams(String parentGlobalParams, String subGlobalParams){ - List parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class); - List subPropertyList = JSONUtils.toList(subGlobalParams, Property.class); - Map subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); - - for(Property parent : parentPropertyList){ - if(!subMap.containsKey(parent.getProp())){ - subPropertyList.add(parent); - } - } - return JSONUtils.toJson(subPropertyList); - } - - /** - * initialize task instance - * @param taskInstance - */ - private void initTaskInstance(TaskInstance taskInstance){ - - if(!taskInstance.isSubProcess()){ - if(taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure()){ - taskInstance.setFlag(Flag.NO); - updateTaskInstance(taskInstance); - return; - } - } - taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); - updateTaskInstance(taskInstance); - } - - /** - * submit task to mysql and task queue - * submit sub process to command - * @param taskInstance - * @return - */ - @Transactional(value = "TransactionManager",rollbackFor = Exception.class) - public TaskInstance submitTask(TaskInstance taskInstance, ProcessInstance processInstance){ - logger.info("start submit task : {}, instance id:{}, state: {}, ", - taskInstance.getName(), processInstance.getId(), processInstance.getState() ); - processInstance = this.findProcessInstanceDetailById(processInstance.getId()); - //submit to mysql - TaskInstance task= submitTaskInstanceToMysql(taskInstance, processInstance); - if(task.isSubProcess() && !task.getState().typeIsFinished()){ - ProcessInstanceMap processInstanceMap = setProcessInstanceMap(processInstance, task); - - TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class); - Map subProcessParam = JSONUtils.toMap(taskNode.getParams()); - Integer defineId = 
Integer.parseInt(subProcessParam.get(Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID)); - createSubWorkProcessCommand(processInstance, processInstanceMap, defineId, task); - }else if(!task.getState().typeIsFinished()){ - //submit to task queue - task.setProcessInstancePriority(processInstance.getProcessInstancePriority()); - submitTaskToQueue(task); - } - logger.info("submit task :{} state:{} complete, instance id:{} state: {} ", - taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState()); - return task; - } - - /** - * set work process instance map - * @param parentInstance - * @param parentTask - * @return - */ - private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask){ - ProcessInstanceMap processMap = findWorkProcessMapByParent(parentInstance.getId(), parentTask.getId()); - if(processMap != null){ - return processMap; - }else if(parentInstance.getCommandType() == CommandType.REPEAT_RUNNING - || parentInstance.isComplementData()){ - // update current task id to map - // repeat running does not generate new sub process instance - processMap = findPreviousTaskProcessMap(parentInstance, parentTask); - if(processMap!= null){ - processMap.setParentTaskInstanceId(parentTask.getId()); - updateWorkProcessInstanceMap(processMap); - return processMap; - } - } - // new task - processMap = new ProcessInstanceMap(); - processMap.setParentProcessInstanceId(parentInstance.getId()); - processMap.setParentTaskInstanceId(parentTask.getId()); - createWorkProcessInstanceMap(processMap); - return processMap; - } - - /** - * find previous task work process map. 
- * @param parentProcessInstance - * @param parentTask - * @return - */ - private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance, - TaskInstance parentTask) { - - Integer preTaskId = 0; - List preTaskList = this.findPreviousTaskListByWorkProcessId(parentProcessInstance.getId()); - for(TaskInstance task : preTaskList){ - if(task.getName().equals(parentTask.getName())){ - preTaskId = task.getId(); - ProcessInstanceMap map = findWorkProcessMapByParent(parentProcessInstance.getId(), preTaskId); - if(map!=null){ - return map; - } - } - } - logger.info("sub process instance is not found,parent task:{},parent instance:{}", - parentTask.getId(), parentProcessInstance.getId()); - return null; - } - - /** - * create sub work process command - * @param parentProcessInstance - * @param instanceMap - * @param childDefineId - * @param task - */ - private void createSubWorkProcessCommand(ProcessInstance parentProcessInstance, - ProcessInstanceMap instanceMap, - Integer childDefineId, TaskInstance task){ - ProcessInstance childInstance = findSubProcessInstance(parentProcessInstance.getId(), task.getId()); - - CommandType fatherType = parentProcessInstance.getCommandType(); - CommandType commandType = fatherType; - if(childInstance == null || commandType == CommandType.REPEAT_RUNNING){ - String fatherHistoryCommand = parentProcessInstance.getHistoryCmd(); - // sub process must begin with schedule/complement data - // if father begin with scheduler/complement data - if(fatherHistoryCommand.startsWith(CommandType.SCHEDULER.toString()) || - fatherHistoryCommand.startsWith(CommandType.COMPLEMENT_DATA.toString())){ - commandType = CommandType.valueOf(fatherHistoryCommand.split(Constants.COMMA)[0]); - } - } - - if(childInstance != null){ - childInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); - updateProcessInstance(childInstance); - } - // set sub work process command - String processMapStr = JSONUtils.toJson(instanceMap); - Map cmdParam = 
JSONUtils.toMap(processMapStr); - - if(commandType == CommandType.COMPLEMENT_DATA || - (childInstance != null && childInstance.isComplementData())){ - Map parentParam = JSONUtils.toMap(parentProcessInstance.getCommandParam()); - String endTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE); - String startTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE); - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, endTime); - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, startTime); - processMapStr = JSONUtils.toJson(cmdParam); - } - - updateSubProcessDefinitionByParent(parentProcessInstance, childDefineId); - - Command command = new Command(); - command.setWarningType(parentProcessInstance.getWarningType()); - command.setWarningGroupId(parentProcessInstance.getWarningGroupId()); - command.setFailureStrategy(parentProcessInstance.getFailureStrategy()); - command.setProcessDefinitionId(childDefineId); - command.setScheduleTime(parentProcessInstance.getScheduleTime()); - command.setExecutorId(parentProcessInstance.getExecutorId()); - command.setCommandParam(processMapStr); - command.setCommandType(commandType); - command.setProcessInstancePriority(parentProcessInstance.getProcessInstancePriority()); - createCommand(command); - logger.info("sub process command created: {} ", command.toString()); - } - - private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) { - ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId()); - ProcessDefinition childDefinition = this.findProcessDefineById(childDefinitionId); - if(childDefinition != null && fatherDefinition != null){ - childDefinition.setReceivers(fatherDefinition.getReceivers()); - childDefinition.setReceiversCc(fatherDefinition.getReceiversCc()); - processDefineMapper.updateById(childDefinition); - } - } - - /** - * submit task to mysql - * @param taskInstance - * @return - */ - public TaskInstance 
submitTaskInstanceToMysql(TaskInstance taskInstance, ProcessInstance processInstance){ - ExecutionStatus processInstanceState = processInstance.getState(); - - if(taskInstance.getState().typeIsFailure()){ - if(taskInstance.isSubProcess()){ - taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 ); - }else { - - if( processInstanceState != ExecutionStatus.READY_STOP - && processInstanceState != ExecutionStatus.READY_PAUSE){ - // failure task set invalid - taskInstance.setFlag(Flag.NO); - updateTaskInstance(taskInstance); - // crate new task instance - if(taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE){ - taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 ); - } - taskInstance.setEndTime(null); - taskInstance.setStartTime(new Date()); - taskInstance.setFlag(Flag.YES); - taskInstance.setHost(null); - taskInstance.setId(0); - } - } - } - taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority()); - taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState)); - taskInstance.setSubmitTime(new Date()); - saveTaskInstance(taskInstance); - return taskInstance; - } - - /** - * submit task to queue - * @param task - */ - public Boolean submitTaskToQueue(TaskInstance task) { - - try{ - // task cannot submit when running - if(task.getState() == ExecutionStatus.RUNNING_EXEUTION){ - logger.info(String.format("submit to task queue, but task [%s] state already be running. 
", task.getName())); - return true; - } - if(checkTaskExistsInTaskQueue(task)){ - logger.info(String.format("submit to task queue, but task [%s] already exists in the queue.", task.getName())); - return true; - } - logger.info("task ready to queue: {}" , task); - taskQueue.add(SCHEDULER_TASKS_QUEUE, taskZkInfo(task)); - logger.info(String.format("master insert into queue success, task : %s", task.getName()) ); - return true; - }catch (Exception e){ - logger.error("submit task to queue Exception: ", e); - logger.error("task queue error : %s", JSONUtils.toJson(task)); - return false; - - } - } - - /** - * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_${task executed by ip1},${ip2}... - * - * The tasks with the highest priority are selected by comparing the priorities of the above four levels from high to low. - * - * 流程实例优先级_流程实例id_任务优先级_任务id_任务执行机器ip1,ip2... high <- low - * - * @param taskInstance - * @return - */ - private String taskZkInfo(TaskInstance taskInstance) { - - int taskWorkerGroupId = getTaskWorkerGroupId(taskInstance); - - StringBuilder sb = new StringBuilder(100); - - sb.append(taskInstance.getProcessInstancePriority().ordinal()).append(Constants.UNDERLINE) - .append(taskInstance.getProcessInstanceId()).append(Constants.UNDERLINE) - .append(taskInstance.getTaskInstancePriority().ordinal()).append(Constants.UNDERLINE) - .append(taskInstance.getId()).append(Constants.UNDERLINE); - - if(taskWorkerGroupId > 0){ - //not to find data from db - WorkerGroup workerGroup = queryWorkerGroupById(taskWorkerGroupId); - if(workerGroup == null ){ - logger.info("task {} cannot find the worker group, use all worker instead.", taskInstance.getId()); - - sb.append(Constants.DEFAULT_WORKER_ID); - return sb.toString(); - } - - String ips = workerGroup.getIpList(); - - if(StringUtils.isBlank(ips)){ - logger.error("task:{} worker group:{} parameters(ip_list) is null, this task would be running on all workers", - taskInstance.getId(), 
workerGroup.getId()); - sb.append(Constants.DEFAULT_WORKER_ID); - return sb.toString(); - } - - StringBuilder ipSb = new StringBuilder(100); - String[] ipArray = ips.split(COMMA); - - for (String ip : ipArray) { - long ipLong = IpUtils.ipToLong(ip); - ipSb.append(ipLong).append(COMMA); - } - - if(ipSb.length() > 0) { - ipSb.deleteCharAt(ipSb.length() - 1); - } - - sb.append(ipSb); - }else{ - sb.append(Constants.DEFAULT_WORKER_ID); - } - - - return sb.toString(); - } - - /** - * get submit task instance state by the work process state - * cannot modify the task state when running/kill/submit success, or this - * task instance is already exists in task queue . - * return pause if work process state is ready pause - * return stop if work process state is ready stop - * if all of above are not satisfied, return submit success - * - * @param taskInstance - * @param processInstanceState - * @return - */ - public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState){ - ExecutionStatus state = taskInstance.getState(); - if( - // running or killed - // the task already exists in task queue - // return state - state == ExecutionStatus.RUNNING_EXEUTION - || state == ExecutionStatus.KILL - || checkTaskExistsInTaskQueue(taskInstance) - ){ - return state; - } - //return pasue /stop if process instance state is ready pause / stop - // or return submit success - if( processInstanceState == ExecutionStatus.READY_PAUSE){ - state = ExecutionStatus.PAUSE; - }else if(processInstanceState == ExecutionStatus.READY_STOP) { - state = ExecutionStatus.KILL; - }else{ - state = ExecutionStatus.SUBMITTED_SUCCESS; - } - return state; - } - - /** - * check the task instance existing in queue - * @return - */ - public boolean checkTaskExistsInTaskQueue(TaskInstance task){ - if(task.isSubProcess()){ - return false; - } - - String taskZkInfo = taskZkInfo(task); - - return taskQueue.checkTaskExists(SCHEDULER_TASKS_QUEUE, taskZkInfo); - } - - /** - * 
create a new process instance - * @param processInstance - */ - public void createProcessInstance(ProcessInstance processInstance){ - - if (processInstance != null){ - processInstanceMapper.insert(processInstance); - } - } - - /** - * insert or update work process instance to data base - * @param workProcessInstance - */ - public void saveProcessInstance(ProcessInstance workProcessInstance){ - - if (workProcessInstance == null){ - logger.error("save error, process instance is null!"); - return ; - } - //创建流程实例 - if(workProcessInstance.getId() != 0){ - processInstanceMapper.updateById(workProcessInstance); - }else{ - createProcessInstance(workProcessInstance); - } - } - - /** - * insert or update command - * @param command - * @return - */ - public int saveCommand(Command command){ - if(command.getId() != 0){ - return commandMapper.updateById(command); - }else{ - return commandMapper.insert(command); - } - } - - /** - * insert or update task instance - * @param taskInstance - * @return - */ - public boolean saveTaskInstance(TaskInstance taskInstance){ - if(taskInstance.getId() != 0){ - return updateTaskInstance(taskInstance); - }else{ - return createTaskInstance(taskInstance); - } - } - - /** - * insert task instance - * @param taskInstance - * @return - */ - public boolean createTaskInstance(TaskInstance taskInstance) { - int count = taskInstanceMapper.insert(taskInstance); - return count > 0; - } - - /** - * update task instance - * @param taskInstance - * @return - */ - public boolean updateTaskInstance(TaskInstance taskInstance){ - int count = taskInstanceMapper.updateById(taskInstance); - return count > 0; - } - /** - * delete a command by id - * @param id - */ - public void delCommandByid(int id) { - commandMapper.deleteById(id); - } - - public TaskInstance findTaskInstanceById(Integer taskId){ - return taskInstanceMapper.queryById(taskId); - } - - - /** - * package task instance,associate processInstance and processDefine - * @param taskInstId - * @return - 
*/ - public TaskInstance getTaskInstanceRelationByTaskId(int taskInstId){ - // get task instance - TaskInstance taskInstance = findTaskInstanceById(taskInstId); - // get process instance - ProcessInstance processInstance = findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); - // get process define - ProcessDefinition processDefine = findProcessDefineById(taskInstance.getProcessDefinitionId()); - - taskInstance.setProcessInstance(processInstance); - taskInstance.setProcessDefine(processDefine); - return taskInstance; - } - - - /** - * get id list by task state - * @param instanceId - * @param state - * @return - */ - public List findTaskIdByInstanceState(int instanceId, ExecutionStatus state){ - return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal()); - } - - /** - * - * find valid task list by process definition id - * @param processInstanceId - * @return - */ - public List findValidTaskListByProcessId(Integer processInstanceId){ - return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES); - } - - /** - * find previous task list by work process id - * @param workProcessInstanceId - * @return - */ - public List findPreviousTaskListByWorkProcessId(Integer workProcessInstanceId){ - return taskInstanceMapper.findValidTaskListByProcessId(workProcessInstanceId, Flag.NO); - } - - /** - * update work process instance map - * @param processInstanceMap - * @return - */ - public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){ - return processInstanceMapMapper.updateById(processInstanceMap); - } - - - /** - * create work process instance map - * @param processInstanceMap - * @return - */ - public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){ - Integer count = 0; - if(processInstanceMap !=null){ - return processInstanceMapMapper.insert(processInstanceMap); - } - return count; - } - - /** - * find work process map by parent process id and parent task id. 
- * @param parentWorkProcessId - * @param parentTaskId - * @return - */ - public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId){ - return processInstanceMapMapper.queryByParentId(parentWorkProcessId, parentTaskId); - } - - /** - * delete work process map by parent process id - * @param parentWorkProcessId - * @return - */ - public int deleteWorkProcessMapByParentId(int parentWorkProcessId){ - return processInstanceMapMapper.deleteByParentProcessId(parentWorkProcessId); - - } - - public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId){ - ProcessInstance processInstance = null; - ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryByParentId(parentProcessId, parentTaskId); - if(processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0){ - return processInstance; - } - processInstance = findProcessInstanceById(processInstanceMap.getProcessInstanceId()); - return processInstance; - } - public ProcessInstance findParentProcessInstance(Integer subProcessId) { - ProcessInstance processInstance = null; - ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(subProcessId); - if(processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0){ - return processInstance; - } - processInstance = findProcessInstanceById(processInstanceMap.getParentProcessInstanceId()); - return processInstance; - } - - - - /** - * change task state - * @param state - * @param startTime - * @param host - * @param executePath - */ - public void changeTaskState(ExecutionStatus state, Date startTime, String host, - String executePath, - String logPath, - int taskInstId) { - TaskInstance taskInstance = taskInstanceMapper.queryById(taskInstId); - taskInstance.setState(state); - taskInstance.setStartTime(startTime); - taskInstance.setHost(host); - taskInstance.setExecutePath(executePath); - taskInstance.setLogPath(logPath); - 
saveTaskInstance(taskInstance); - } - - /** - * update process instance - * @param instance - * @return - */ - public int updateProcessInstance(ProcessInstance instance){ - - return processInstanceMapper.updateById(instance); - } - - /** - * update the process instance - * @param processInstanceId - * @param processJson - * @param globalParams - * @param scheduleTime - * @param flag - * @param locations - * @param connects - * @return - */ - public int updateProcessInstance(Integer processInstanceId, String processJson, - String globalParams, Date scheduleTime, Flag flag, - String locations, String connects){ - ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); - if(processInstance!= null){ - processInstance.setProcessInstanceJson(processJson); - processInstance.setGlobalParams(globalParams); - processInstance.setScheduleTime(scheduleTime); - processInstance.setLocations(locations); - processInstance.setConnects(connects); - return processInstanceMapper.updateById(processInstance); - } - return 0; - } - - /** - * change task state - * @param state - * @param endTime - */ - public void changeTaskState(ExecutionStatus state, - Date endTime, - int taskInstId) { - TaskInstance taskInstance = taskInstanceMapper.queryById(taskInstId); - taskInstance.setState(state); - taskInstance.setEndTime(endTime); - saveTaskInstance(taskInstance); - } - - /** - * convert integer list to string list - * @param intList - * @return - */ - public List convertIntListToString(List intList){ - if(intList == null){ - return new ArrayList<>(); - } - List result = new ArrayList(intList.size()); - for(Integer intVar : intList){ - result.add(String.valueOf(intVar)); - } - return result; - } - - /** - * set task - * 根据任务实例id设置pid - * @param taskInstId - * @param pid - */ - public void updatePidByTaskInstId(int taskInstId, int pid) { - TaskInstance taskInstance = taskInstanceMapper.queryById(taskInstId); - taskInstance.setPid(pid); - 
taskInstance.setAppLink(""); - saveTaskInstance(taskInstance); - } - - /** - * update pid and app links field by task instance id - * @param taskInstId - * @param pid - */ - public void updatePidByTaskInstId(int taskInstId, int pid,String appLinks) { - - TaskInstance taskInstance = taskInstanceMapper.queryById(taskInstId); - taskInstance.setPid(pid); - taskInstance.setAppLink(appLinks); - saveTaskInstance(taskInstance); - } - - /** - * query ProcessDefinition by name - * - * @see ProcessDefinition - */ - public ProcessDefinition findProcessDefineByName(int projectId, String name) { - ProcessDefinition projectFlow = processDefineMapper.queryByDefineName(projectId, name); - return projectFlow; - } - - /** - * query Schedule

- * - * @see Schedule - */ - public Schedule querySchedule(int id) { - return scheduleMapper.selectById(id); - } - - public List queryNeedFailoverProcessInstances(String host){ - - return processInstanceMapper.queryByHostAndStatus(host, stateArray); - } - - - - - /** - * process need failover process instance - * @param processInstance - */ - @Transactional(value = "TransactionManager",rollbackFor = Exception.class) - public void processNeedFailoverProcessInstances(ProcessInstance processInstance){ - - - //1 update processInstance host is null - processInstance.setHost("null"); - processInstanceMapper.updateById(processInstance); - - //2 insert into recover command - Command cmd = new Command(); - cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId()); - cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId())); - cmd.setExecutorId(processInstance.getExecutorId()); - cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS); - createCommand(cmd); - - } - - /** - * query all need failover task instances by host - * @param host - * @return - */ - public List queryNeedFailoverTaskInstances(String host){ - return taskInstanceMapper.queryByHostAndStatus(host, - StringUtils.join(stateArray, ",")); - } - - /** - * find data source by id - * @param id - * @return - */ - public DataSource findDataSourceById(int id){ - return dataSourceMapper.selectById(id); - } - - - /** - * update process instance state by id - * @param processInstanceId - * @param executionStatus - * @return - */ - public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { - ProcessInstance instance = processInstanceMapper.selectById(processInstanceId); - instance.setState(executionStatus); - return processInstanceMapper.updateById(instance); - - } - - /** - * find process instance by the task id - * @param taskId - * @return - */ - public ProcessInstance 
findProcessInstanceByTaskId(int taskId){ - TaskInstance taskInstance = taskInstanceMapper.selectById(taskId); - if(taskInstance!= null){ - return processInstanceMapper.selectById(taskInstance.getProcessInstanceId()); - } - return null; - } - - /** - * find udf function list by id list string - * @param ids - * @return - */ - public List queryUdfFunListByids(String ids){ - - return udfFuncMapper.queryUdfByIdStr(ids, null); - } - - /** - * find tenant code by resource name - * @param resName - * @return - */ - public String queryTenantCodeByResName(String resName){ - return resourceMapper.queryTenantCodeByResourceName(resName); - } - - /** - * find schedule list by process define id. - * @param ids - * @return - */ - public List selectAllByProcessDefineId(int[] ids){ - return scheduleMapper.selectAllByProcessDefineArray( - ids); - } - - /** - * get dependency cycle by work process define id and scheduler fire time - * - * @param masterId - * @param processDefinitionId - * @param scheduledFireTime 任务调度预计触发的时间 - * @return - * @throws Exception - */ - public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception { - List list = getCycleDependencies(masterId,new int[]{processDefinitionId},scheduledFireTime); - return list.size()>0 ? 
list.get(0) : null; - - } - - /** - * - * get dependency cycle list by work process define id list and scheduler fire time - * @param masterId - * @param ids - * @param scheduledFireTime 任务调度预计触发的时间 - * @return - * @throws Exception - */ - public List getCycleDependencies(int masterId,int[] ids,Date scheduledFireTime) throws Exception { - List cycleDependencyList = new ArrayList(); - if(ArrayUtils.isEmpty(ids)){ - logger.warn("ids[] is empty!is invalid!"); - return cycleDependencyList; - } - if(scheduledFireTime == null){ - logger.warn("scheduledFireTime is null!is invalid!"); - return cycleDependencyList; - } - - - String strCrontab = ""; - CronExpression depCronExpression; - Cron depCron; - List list; - List schedules = this.selectAllByProcessDefineId(ids); - // 遍历所有的调度信息 - for(Schedule depSchedule:schedules){ - strCrontab = depSchedule.getCrontab(); - depCronExpression = CronUtils.parse2CronExpression(strCrontab); - depCron = CronUtils.parse2Cron(strCrontab); - CycleEnum cycleEnum = CronUtils.getMiniCycle(depCron); - if(cycleEnum == null){ - logger.error("{} is not valid",strCrontab); - continue; - } - Calendar calendar = Calendar.getInstance(); - switch (cycleEnum){ - /*case MINUTE: - calendar.add(Calendar.MINUTE,-61);*/ - case HOUR: - calendar.add(Calendar.HOUR,-25); - break; - case DAY: - calendar.add(Calendar.DATE,-32); - break; - case WEEK: - calendar.add(Calendar.DATE,-32); - break; - case MONTH: - calendar.add(Calendar.MONTH,-13); - break; - default: - logger.warn("Dependent process definition's cycleEnum is {},not support!!", cycleEnum.name()); - continue; - } - Date start = calendar.getTime(); - - if(depSchedule.getProcessDefinitionId() == masterId){ - list = CronUtils.getSelfFireDateList(start, scheduledFireTime, depCronExpression); - }else { - list = CronUtils.getFireDateList(start, scheduledFireTime, depCronExpression); - } - if(list.size()>=1){ - start = list.get(list.size()-1); - CycleDependency dependency = new 
CycleDependency(depSchedule.getProcessDefinitionId(),start, CronUtils.getExpirationTime(start, cycleEnum), cycleEnum); - cycleDependencyList.add(dependency); - } - - } - return cycleDependencyList; - } - - /** - * find last scheduler process instance in the date interval - * @param definitionId - * @param dateInterval - * @return - */ - public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) { - return processInstanceMapper.queryLastSchedulerProcess(definitionId, - DateUtils.dateToString(dateInterval.getStartTime()), - DateUtils.dateToString(dateInterval.getEndTime())); - } - - public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) { - return processInstanceMapper.queryLastManualProcess(definitionId, - dateInterval.getStartTime(), - dateInterval.getEndTime()); - } - - public ProcessInstance findLastRunningProcess(int definitionId, DateInterval dateInterval) { - return processInstanceMapper.queryLastRunningProcess(definitionId, - DateUtils.dateToString(dateInterval.getStartTime()), - DateUtils.dateToString(dateInterval.getEndTime()), - stateArray); - } - - /** - * query user queue by process instance id - * @param processInstanceId - * @return - */ - public String queryUserQueueByProcessInstanceId(int processInstanceId){ - - String queue = ""; - ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId); - if(processInstance == null){ - return queue; - } - User executor = userMapper.selectById(processInstance.getExecutorId()); - if(executor != null){ - queue = executor.getQueue(); - } - return queue; - } - - /** - * query worker group by id - * @param workerGroupId - * @return - */ - public WorkerGroup queryWorkerGroupById(int workerGroupId){ - - return workerGroupMapper.selectById(workerGroupId); - } - - /** - * get task worker group id - * - * @param taskInstance - * @return - */ - public int getTaskWorkerGroupId(TaskInstance taskInstance) { - int 
taskWorkerGroupId = taskInstance.getWorkerGroupId(); - int processInstanceId = taskInstance.getProcessInstanceId(); - - ProcessInstance processInstance = findProcessInstanceById(processInstanceId); - - if(processInstance == null){ - logger.error("cannot find the task:{} process instance", taskInstance.getId()); - return Constants.DEFAULT_WORKER_ID; - } - int processWorkerGroupId = processInstance.getWorkerGroupId(); - - taskWorkerGroupId = (taskWorkerGroupId <= 0 ? processWorkerGroupId : taskWorkerGroupId); - return taskWorkerGroupId; - } - - public List getProjectListHavePerm(int userId){ - List createProjects = projectMapper.queryProjectCreatedByUser(userId); - List authedProjects = projectMapper.queryAuthedProjectListByUserId(userId); - - if(createProjects == null){ - createProjects = new ArrayList<>(); - } - - if(authedProjects != null){ - createProjects.addAll(authedProjects); - } - return createProjects; - } - - public List getProjectIdListHavePerm(int userId){ - - List projectIdList = new ArrayList<>(); - for(Project project : getProjectListHavePerm(userId)){ - projectIdList.add(project.getId()); - } - return projectIdList; - } - - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/TaskRecordDao.java b/escheduler-dao/src/main/java/cn/escheduler/dao/TaskRecordDao.java deleted file mode 100644 index 6d2942fe15..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/TaskRecordDao.java +++ /dev/null @@ -1,299 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.TaskRecordStatus; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.dao.entity.TaskRecord; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.configuration.ConfigurationException; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.*; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - - -/** - * task record dao - */ -public class TaskRecordDao { - - - private static Logger logger = LoggerFactory.getLogger(TaskRecordDao.class.getName()); - - - - /** - * load conf file - */ - private static Configuration conf; - - static { - try { - conf = new PropertiesConfiguration(Constants.DATA_SOURCE_PROPERTIES); - }catch (ConfigurationException e){ - logger.error("load configuration excetpion",e); - System.exit(1); - } - } - - - /** - * get task record flag - * @return - */ - public static boolean getTaskRecordFlag(){ - return conf.getBoolean(Constants.TASK_RECORD_FLAG); - } - /** - * create connection - * @return - */ - private static Connection getConn() { - if(!conf.getBoolean(Constants.TASK_RECORD_FLAG)){ - return null; - } - String driver = "com.mysql.jdbc.Driver"; - String url = conf.getString(Constants.TASK_RECORD_URL); - String username = conf.getString(Constants.TASK_RECORD_USER); - 
String password = conf.getString(Constants.TASK_RECORD_PWD); - Connection conn = null; - try { - //classLoader,加载对应驱动 - Class.forName(driver); - conn = DriverManager.getConnection(url, username, password); - } catch (ClassNotFoundException e) { - logger.error("Exception ", e); - } catch (SQLException e) { - logger.error("Exception ", e); - } - return conn; - } - - /** - * generate where sql string - * @param filterMap - * @return - */ - private static String getWhereString(Map filterMap) - { - if(filterMap.size() ==0){ - return ""; - } - - String result = " where 1=1 "; - - Object taskName = filterMap.get("taskName"); - if(taskName != null && StringUtils.isNotEmpty(taskName.toString())){ - result += " and PROC_NAME like concat('%', '" + taskName.toString() + "', '%') "; - } - - Object taskDate = filterMap.get("taskDate"); - if(taskDate != null && StringUtils.isNotEmpty(taskDate.toString())){ - result += " and PROC_DATE='" + taskDate.toString() + "'"; - } - - Object state = filterMap.get("state"); - if(state != null && StringUtils.isNotEmpty(state.toString())){ - result += " and NOTE='" + state.toString() + "'"; - } - - Object sourceTable = filterMap.get("sourceTable"); - if(sourceTable!= null && StringUtils.isNotEmpty(sourceTable.toString())){ - result += " and SOURCE_TAB like concat('%', '" + sourceTable.toString()+ "', '%')"; - } - - Object targetTable = filterMap.get("targetTable"); - if(sourceTable!= null && StringUtils.isNotEmpty(targetTable.toString())){ - result += " and TARGET_TAB like concat('%', '"+ targetTable.toString()+"', '%') " ; - } - - Object start = filterMap.get("startTime"); - if(start != null && StringUtils.isNotEmpty(start.toString())){ - result += " and STARTDATE>='" + start.toString() + "'"; - } - - Object end = filterMap.get("endTime"); - if(end != null && StringUtils.isNotEmpty(end.toString())){ - result += " and ENDDATE>='" + end.toString()+ "'"; - } - return result; - } - - /** - * count task record - * @param filterMap - * @return - */ 
- public static int countTaskRecord(Map filterMap, String table){ - - int count = 0; - Connection conn = null; - try { - conn = getConn(); - if(conn == null){ - return count; - } - String sql = String.format("select count(1) as count from %s", table); - sql += getWhereString(filterMap); - PreparedStatement pstmt; - pstmt = conn.prepareStatement(sql); - ResultSet rs = pstmt.executeQuery(); - while(rs.next()){ - count = rs.getInt("count"); - break; - } - } catch (SQLException e) { - logger.error("Exception ", e); - }finally { - try { - if(conn != null){ - conn.close(); - } - } catch (SQLException e) { - logger.error("Exception ", e); - } - } - return count; - } - - /** - * query task record by filter map paging - * @param filterMap - * @return - */ - public static List queryAllTaskRecord(Map filterMap , String table) { - - String sql = String.format("select * from %s", table); - sql += getWhereString(filterMap); - - int offset = Integer.parseInt(filterMap.get("offset")); - int pageSize = Integer.parseInt(filterMap.get("pageSize")); - sql += String.format(" order by STARTDATE desc limit %d,%d", offset, pageSize); - - List recordList = new ArrayList<>(); - try{ - recordList = getQueryResult(sql); - }catch (Exception e){ - logger.error("Exception ", e); - } - return recordList; - } - - /** - * convert result set to task record - * @param resultSet - * @return - * @throws SQLException - */ - private static TaskRecord convertToTaskRecord(ResultSet resultSet) throws SQLException { - - TaskRecord taskRecord = new TaskRecord(); - taskRecord.setId(resultSet.getInt("ID")); - taskRecord.setProcId(resultSet.getInt("PROC_ID")); - taskRecord.setProcName(resultSet.getString("PROC_NAME")); - taskRecord.setProcDate(resultSet.getString("PROC_DATE")); - taskRecord.setStartTime(DateUtils.stringToDate(resultSet.getString("STARTDATE"))); - taskRecord.setEndTime(DateUtils.stringToDate(resultSet.getString("ENDDATE"))); - taskRecord.setResult(resultSet.getString("RESULT")); - 
taskRecord.setDuration(resultSet.getInt("DURATION")); - taskRecord.setNote(resultSet.getString("NOTE")); - taskRecord.setSchema(resultSet.getString("SCHEMA")); - taskRecord.setJobId(resultSet.getString("JOB_ID")); - taskRecord.setSourceTab(resultSet.getString("SOURCE_TAB")); - taskRecord.setSourceRowCount(resultSet.getLong("SOURCE_ROW_COUNT")); - taskRecord.setTargetTab(resultSet.getString("TARGET_TAB")); - taskRecord.setTargetRowCount(resultSet.getLong("TARGET_ROW_COUNT")); - taskRecord.setErrorCode(resultSet.getString("ERROR_CODE")); - return taskRecord; - } - - /** - * query task list by select sql - * @param selectSql - * @return - */ - private static List getQueryResult(String selectSql) { - List recordList = new ArrayList<>(); - Connection conn = null; - try { - conn = getConn(); - if(conn == null){ - return recordList; - } - PreparedStatement pstmt; - pstmt = conn.prepareStatement(selectSql); - ResultSet rs = pstmt.executeQuery(); - - while(rs.next()){ - TaskRecord taskRecord = convertToTaskRecord(rs); - recordList.add(taskRecord); - } - } catch (SQLException e) { - logger.error("Exception ", e); - }finally { - try { - if(conn != null){ - conn.close(); - } - } catch (SQLException e) { - logger.error("Exception ", e); - } - } - return recordList; - } - - /** - * according to procname and procdate query task record - * @param procName - * @param procDate - * @return - */ - public static TaskRecordStatus getTaskRecordState(String procName,String procDate){ - String sql = String.format("SELECT * FROM eamp_hive_log_hd WHERE PROC_NAME='%s' and PROC_DATE like '%s'" - ,procName,procDate + "%"); - List taskRecordList = getQueryResult(sql); - - // contains no record and sql exception - if (CollectionUtils.isEmpty(taskRecordList)){ - // exception - return TaskRecordStatus.EXCEPTION; - }else if (taskRecordList.size() > 1){ - return TaskRecordStatus.EXCEPTION; - }else { - TaskRecord taskRecord = taskRecordList.get(0); - if (taskRecord == null){ - return 
TaskRecordStatus.EXCEPTION; - } - Long targetRowCount = taskRecord.getTargetRowCount(); - if (targetRowCount <= 0){ - return TaskRecordStatus.FAILURE; - }else { - return TaskRecordStatus.SUCCESS; - } - - } - } - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/config/MybatisPlusConfig.java b/escheduler-dao/src/main/java/cn/escheduler/dao/config/MybatisPlusConfig.java deleted file mode 100644 index fa5723ba9d..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/config/MybatisPlusConfig.java +++ /dev/null @@ -1,17 +0,0 @@ -package cn.escheduler.dao.config; - -import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; -import org.mybatis.spring.annotation.MapperScan; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - - -@Configuration -@MapperScan("cn.escheduler.*.mapper") -public class MybatisPlusConfig { - @Bean - public PaginationInterceptor paginationInterceptor() { - return new PaginationInterceptor(); - } - -} \ No newline at end of file diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/config/YmlConfig.java b/escheduler-dao/src/main/java/cn/escheduler/dao/config/YmlConfig.java deleted file mode 100755 index c650fc2fe0..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/config/YmlConfig.java +++ /dev/null @@ -1,59 +0,0 @@ -package cn.escheduler.dao.config; - - - -import org.yaml.snakeyaml.*; - -import java.io.InputStream; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.Map; - -/** - * Created by qiaozhanwei on 2019/9/17. 
- */ -public class YmlConfig { - - private static Map allMap=new HashMap(); - static { - Yaml yaml = new Yaml(); - InputStream inputStream = YmlConfig.class.getResourceAsStream("/application.yml"); - Iterator result = yaml.loadAll(inputStream).iterator(); - while(result.hasNext()){ - Map map=(Map)result.next(); - iteratorYml( map,null); - } - } - - public static void main(String[] args) { - String ss = allMap.get("spring.datasource.url"); - System.out.println(ss); - } - - public static void iteratorYml(Map map,String key){ - Iterator iterator = map.entrySet().iterator(); - while(iterator.hasNext()){ - Map.Entry entry = (Map.Entry) iterator.next(); - Object key2 = entry.getKey(); - Object value = entry.getValue(); - if(value instanceof LinkedHashMap){ - if(key==null){ - iteratorYml((Map)value,key2.toString()); - }else{ - iteratorYml((Map)value,key+"."+key2.toString()); - } - } - if(value instanceof String){ - if(key==null){ - allMap.put(key2.toString(), value.toString()); - } - if(key!=null){ - allMap.put(key+"."+key2.toString(), value.toString()); - } - } - } - - } - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/datasource/ConnectionFactory.java b/escheduler-dao/src/main/java/cn/escheduler/dao/datasource/ConnectionFactory.java deleted file mode 100644 index f256726bc3..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/datasource/ConnectionFactory.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.datasource; - -import com.alibaba.druid.pool.DruidDataSource; -import org.apache.ibatis.mapping.Environment; -import org.apache.ibatis.session.Configuration; -import org.apache.ibatis.session.SqlSession; -import org.apache.ibatis.session.SqlSessionFactory; -import org.apache.ibatis.session.SqlSessionFactoryBuilder; -import org.apache.ibatis.transaction.TransactionFactory; -import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory; -import org.mybatis.spring.SqlSessionTemplate; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.sql.DataSource; - - - -/** - * data source connection factory - */ -public class ConnectionFactory { - private static final Logger logger = LoggerFactory.getLogger(ConnectionFactory.class); - - private static SqlSessionFactory sqlSessionFactory; - - /** - * get the data source - */ - public static DruidDataSource getDataSource() { - DruidDataSource druidDataSource = new DruidDataSource(); - - druidDataSource.setDriverClassName("com.mysql.jdbc.Driver"); - druidDataSource.setUrl("jdbc:mysql://192.168.220.188:3306/escheduler?useUnicode=true&characterEncoding=UTF-8"); - druidDataSource.setUsername("root"); - druidDataSource.setPassword("root@123"); - druidDataSource.setInitialSize(5); - druidDataSource.setMinIdle(5); - druidDataSource.setMaxActive(20); - druidDataSource.setMaxWait(60000); - druidDataSource.setTimeBetweenEvictionRunsMillis(60000); - druidDataSource.setMinEvictableIdleTimeMillis(300000); - druidDataSource.setValidationQuery("SELECT 1"); - return 
druidDataSource; - } - - /** - * get sql session factory - */ - public static SqlSessionFactory getSqlSessionFactory() { - if (sqlSessionFactory == null) { - synchronized (ConnectionFactory.class) { - if (sqlSessionFactory == null) { - DataSource dataSource = getDataSource(); - TransactionFactory transactionFactory = new JdbcTransactionFactory(); - - Environment environment = new Environment("development", transactionFactory, dataSource); - - Configuration configuration = new Configuration(environment); - configuration.setLazyLoadingEnabled(true); - configuration.addMappers("cn.escheduler.dao.mapper"); - - - SqlSessionFactoryBuilder builder = new SqlSessionFactoryBuilder(); - sqlSessionFactory = builder.build(configuration); - } - } - } - - return sqlSessionFactory; - } - - /** - * get sql session - */ - public static SqlSession getSqlSession() { - return new SqlSessionTemplate(getSqlSessionFactory()); - } - - public static T getMapper(Class type){ - return getSqlSession().getMapper(type); - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/AccessToken.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/AccessToken.java deleted file mode 100644 index c6cd118a42..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/AccessToken.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -@Data -@TableName("t_escheduler_access_token") -public class AccessToken { - - @TableId(value="id", type=IdType.AUTO) - private int id; - - - @TableField("user_id") - private int userId; - - @TableField("token") - private String token; - - @TableField(exist = false) - private String userName; - - @TableField("expire_time") - private Date expireTime; - - @TableField("create_time") - private Date createTime; - - @TableField("update_time") - private Date updateTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public String getToken() { - return token; - } - - public void setToken(String token) { - this.token = token; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - public Date getExpireTime() { - return expireTime; - } - - public void setExpireTime(Date expireTime) { - this.expireTime = expireTime; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return 
updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Alert.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Alert.java deleted file mode 100644 index 77dfe8ea3d..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Alert.java +++ /dev/null @@ -1,236 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.enums.*; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - -/** - * alert - */ -@Data -@TableName("t_escheduler_alert") -public class Alert { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * title - */ - private String title; - - /** - * show type - */ - private ShowType showType; - /** - * content - */ - private String content; - - /** - * alert type - */ - private AlertType alertType; - - - - /** - * alert status - */ - private AlertStatus alertStatus; - - /** - * log - */ - private String log; - - /** - * alert group id - */ - @TableField("alertgroup_id") - private int alertGroupId; - - - /** - * receivers - */ - private String receivers; - - /** - * receivers cc - */ - private String receiversCc; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - - @TableField(exist = false) - private Map info = new HashMap<>(); - - public Map getInfo() { - return info; - } - - public void setInfo(Map info) { - this.info = info; - } - - public Alert() { - } - - public Alert(int id, String title) { - this.id = id; - this.title = title; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getTitle() { - return title; - } - - public void setTitle(String title) { - this.title = title; - } - - public ShowType getShowType() { - return showType; - } - - public void setShowType(ShowType showType) { - this.showType = showType; - } - - public String getContent() { - return content; - } - - public void setContent(String content) { - this.content = content; - } - - public AlertType 
getAlertType() { - return alertType; - } - - public void setAlertType(AlertType alertType) { - this.alertType = alertType; - } - - - public AlertStatus getAlertStatus() { - return alertStatus; - } - - public void setAlertStatus(AlertStatus alertStatus) { - this.alertStatus = alertStatus; - } - - public String getLog() { - return log; - } - - public void setLog(String log) { - this.log = log; - } - - public int getAlertGroupId() { - return alertGroupId; - } - - public void setAlertGroupId(int alertGroupId) { - this.alertGroupId = alertGroupId; - } - - public Date getCreateTime() { - return createTime; - } - - public String getReceivers() { - return receivers; - } - - public void setReceivers(String receivers) { - this.receivers = receivers; - } - - public String getReceiversCc() { - return receiversCc; - } - - public void setReceiversCc(String receiversCc) { - this.receiversCc = receiversCc; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - @Override - public String toString() { - return "Alert{" + - "id=" + id + - ", title='" + title + '\'' + - ", showType=" + showType + - ", content='" + content + '\'' + - ", alertType=" + alertType + - ", alertStatus=" + alertStatus + - ", log='" + log + '\'' + - ", alertGroupId=" + alertGroupId + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - ", receivers='" + receivers + '\'' + - ", receiversCc='" + receiversCc + '\'' + - ", info=" + info + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/AlertGroup.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/AlertGroup.java deleted file mode 100644 index 6fef715aa9..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/AlertGroup.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) 
under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.enums.AlertType; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -@Data -@TableName("t_escheduler_alertgroup") -public class AlertGroup { - - - /** - * primary key - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * alert group name - */ - private String groupName; - - /** - * alert group type - */ - private AlertType groupType; - - /** - * alert group description - */ - private String description; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getGroupName() { - return groupName; - } - - public void setGroupName(String groupName) { - this.groupName = groupName; - } - - public AlertType getGroupType() { - return groupType; - } - - public void setGroupType(AlertType groupType) { - this.groupType = groupType; - } - - public Date getCreateTime() { - 
return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Command.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Command.java deleted file mode 100644 index 36cfa38ce3..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Command.java +++ /dev/null @@ -1,288 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.enums.*; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * command - */ -@Data -@TableName("t_escheduler_command") -public class Command { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * command type - */ - @TableField("command_type") - private CommandType commandType; - - /** - * process definition id - */ - @TableField("process_definition_id") - private int processDefinitionId; - - /** - * executor id - */ - @TableField("executor_id") - private int executorId; - - /** - * command parameter, format json - */ - @TableField("command_param") - private String commandParam; - - /** - * task depend type - */ - @TableField("task_depend_type") - private TaskDependType taskDependType; - - /** - * failure strategy - */ - @TableField("failure_strategy") - private FailureStrategy failureStrategy; - - /** - * warning type - */ - @TableField("warning_type") - private WarningType warningType; - - /** - * warning group id - */ - @TableField("warning_type") - private Integer warningGroupId; - - /** - * schedule time - */ - @TableField("schedule_time") - private Date scheduleTime; - - /** - * start time - */ - @TableField("start_time") - private Date startTime; - - /** - * process instance priority - */ - @TableField("process_instance_priority") - private Priority processInstancePriority; - - /** - * update time - */ - @TableField("update_time") - private Date updateTime; - - - /** - * - */ - @TableField("worker_group_id;") - private int workerGroupId; - - - public Command() { - this.taskDependType = TaskDependType.TASK_POST; - this.failureStrategy = FailureStrategy.CONTINUE; - this.startTime = new Date(); - this.updateTime = new Date(); - } - - 
public Command( - CommandType commandType, - TaskDependType taskDependType, - FailureStrategy failureStrategy, - int executorId, - int processDefinitionId, - String commandParam, - WarningType warningType, - int warningGroupId, - Date scheduleTime, - Priority processInstancePriority) { - this.commandType = commandType; - this.executorId = executorId; - this.processDefinitionId = processDefinitionId; - this.commandParam = commandParam; - this.warningType = warningType; - this.warningGroupId = warningGroupId; - this.scheduleTime = scheduleTime; - this.taskDependType = taskDependType; - this.failureStrategy = failureStrategy; - this.startTime = new Date(); - this.updateTime = new Date(); - this.processInstancePriority = processInstancePriority; - } - - - public TaskDependType getTaskDependType() { - return taskDependType; - } - - public void setTaskDependType(TaskDependType taskDependType) { - this.taskDependType = taskDependType; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public CommandType getCommandType() { - return commandType; - } - - public void setCommandType(CommandType commandType) { - this.commandType = commandType; - } - - public int getProcessDefinitionId() { - return processDefinitionId; - } - - public void setProcessDefinitionId(int processDefinitionId) { - this.processDefinitionId = processDefinitionId; - } - - - public FailureStrategy getFailureStrategy() { - return failureStrategy; - } - - public void setFailureStrategy(FailureStrategy failureStrategy) { - this.failureStrategy = failureStrategy; - } - - public void setCommandParam(String commandParam) { - this.commandParam = commandParam; - } - - public String getCommandParam() { - return commandParam; - } - - public WarningType getWarningType() { - return warningType; - } - - public void setWarningType(WarningType warningType) { - this.warningType = warningType; - } - - public Integer getWarningGroupId() { - return warningGroupId; - } - - 
public void setWarningGroupId(Integer warningGroupId) { - this.warningGroupId = warningGroupId; - } - - public Date getScheduleTime() { - return scheduleTime; - } - - public void setScheduleTime(Date scheduleTime) { - this.scheduleTime = scheduleTime; - } - - public Date getStartTime() { - return startTime; - } - - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - public int getExecutorId() { - return executorId; - } - - public void setExecutorId(int executorId) { - this.executorId = executorId; - } - - public Priority getProcessInstancePriority() { - return processInstancePriority; - } - - public void setProcessInstancePriority(Priority processInstancePriority) { - this.processInstancePriority = processInstancePriority; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - - public int getWorkerGroupId() { - return workerGroupId; - } - - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; - } - - @Override - public String toString() { - return "Command{" + - "id=" + id + - ", commandType=" + commandType + - ", processDefinitionId=" + processDefinitionId + - ", executorId=" + executorId + - ", commandParam='" + commandParam + '\'' + - ", taskDependType=" + taskDependType + - ", failureStrategy=" + failureStrategy + - ", warningType=" + warningType + - ", warningGroupId=" + warningGroupId + - ", scheduleTime=" + scheduleTime + - ", startTime=" + startTime + - ", processInstancePriority=" + processInstancePriority + - ", updateTime=" + updateTime + - ", workerGroupId=" + workerGroupId + - '}'; - } -} - diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/CommandCount.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/CommandCount.java deleted file mode 100644 index 334d41562a..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/CommandCount.java +++ /dev/null @@ 
-1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - - -import cn.escheduler.common.enums.CommandType; - -public class CommandCount { - - - /** - * execution state - */ - private CommandType commandType; - - /** - * count for state - */ - private int count; - - - @Override - public String toString(){ - return "command count:" + - " commandType: "+ commandType.toString() + - " count: "+ count; - } - - public CommandType getCommandType() { - return commandType; - } - - public void setCommandType(CommandType commandType) { - this.commandType = commandType; - } - - public int getCount() { - return count; - } - - public void setCount(int count) { - this.count = count; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/CycleDependency.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/CycleDependency.java deleted file mode 100644 index 3ac5533f98..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/CycleDependency.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.enums.CycleEnum; - -import java.util.Date; - -/** - * cycle dependency - */ -public class CycleDependency { - /** - * process define id - */ - private int processDefineId; - /** - * last schedule time - */ - private Date lastScheduleTime; - /** - * expiration time - */ - private Date expirationTime; - /** - * cycle enum - */ - private CycleEnum cycleEnum; - - - public CycleDependency(int processDefineId, Date lastScheduleTime, Date expirationTime, CycleEnum cycleEnum) { - this.processDefineId = processDefineId; - this.lastScheduleTime = lastScheduleTime; - this.expirationTime = expirationTime; - this.cycleEnum = cycleEnum; - } - - public int getProcessDefineId() { - return processDefineId; - } - - public void setProcessDefineId(int processDefineId) { - this.processDefineId = processDefineId; - } - - public Date getLastScheduleTime() { - return lastScheduleTime; - } - - public void setLastScheduleTime(Date lastScheduleTime) { - this.lastScheduleTime = lastScheduleTime; - } - - public Date getExpirationTime() { - return expirationTime; - } - - public void setExpirationTime(Date expirationTime) { - this.expirationTime = expirationTime; - } - - public CycleEnum getCycleEnum() { - return cycleEnum; - } - - public void 
setCycleEnum(CycleEnum cycleEnum) { - this.cycleEnum = cycleEnum; - } - - @Override - public String toString() { - return "CycleDependency{" + - "processDefineId=" + processDefineId + - ", lastScheduleTime=" + lastScheduleTime + - ", expirationTime=" + expirationTime + - ", cycleEnum=" + cycleEnum + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/DataSource.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/DataSource.java deleted file mode 100644 index 5fd4db6dd8..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/DataSource.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - - - -import cn.escheduler.common.enums.DbType; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -@Data -@TableName("t_escheduler_datasource") -public class DataSource { - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * user id - */ - private int userId; - - /** - * user name - */ - @TableField(exist = false) - private String userName; - - /** - * data source name - */ - private String name; - - /** - * note - */ - private String note; - - /** - * data source type - */ - private DbType type; - - /** - * connection parameters - */ - private String connectionParams; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - public DataSource() { - } - - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getNote() { - return note; - } - - public void setNote(String note) { - this.note = note; - } - - public DbType getType() { - return type; - } - - public void setType(DbType type) { - this.type = type; - } - - public String getConnectionParams() { - return connectionParams; - } - - public void setConnectionParams(String connectionParams) { - this.connectionParams = connectionParams; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - 
this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - - @Override - public String toString() { - return "DataSource{" + - "id=" + id + - ", userId=" + userId + - ", userName='" + userName + '\'' + - ", name='" + name + '\'' + - ", note='" + note + '\'' + - ", type=" + type + - ", connectionParams='" + connectionParams + '\'' + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - DataSource that = (DataSource) o; - - if (id != that.id) { - return false; - } - return name.equals(that.name); - - } - - @Override - public int hashCode() { - int result = id; - result = 31 * result + name.hashCode(); - return result; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/DatasourceUser.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/DatasourceUser.java deleted file mode 100644 index 2379891630..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/DatasourceUser.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; - -import java.util.Date; - -/** - * data source user relation - */ -@TableName("t_escheduler_relation_datasource_user") -public class DatasourceUser { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * user id - */ - private int userId; - - /** - * data source id - */ - private int datasourceId; - - /** - * permission - */ - private int perm; - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public int getDatasourceId() { - return datasourceId; - } - - public void setDatasourceId(int datasourceId) { - this.datasourceId = datasourceId; - } - - public int getPerm() { - return perm; - } - - public void setPerm(int perm) { - this.perm = perm; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - @Override - public String toString() { - return "DatasourceUser{" + - "id=" + id + - ", userId=" + userId + - ", datasourceId=" + datasourceId + - ", perm=" + perm + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/DefinitionGroupByUser.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/DefinitionGroupByUser.java 
deleted file mode 100644 index d8d409aebb..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/DefinitionGroupByUser.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -/** - * count definition number group by user - */ -public class DefinitionGroupByUser { - - /** - * user name - */ - private String userName; - - /** - * user id - */ - private Integer userId; - - /** - * count number - */ - private int count; - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - public int getCount() { - return count; - } - - public void setCount(int count) { - this.count = count; - } - - public Integer getUserId() { - return userId; - } - - public void setUserId(Integer userId) { - this.userId = userId; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Dependency.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Dependency.java deleted file mode 100644 index cb85d8137b..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Dependency.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.enums.SelfDependStrategy; - -/** - * dependency - */ -public class Dependency { - - /** - * self depend strategy - */ - private SelfDependStrategy self; - - /** - * outer dependency string - */ - private String outer; - - - public Dependency(){} - - public Dependency(String outer, SelfDependStrategy self){ - - this.outer = outer; - this.self = self; - - } - - - public SelfDependStrategy getSelf() { - return self; - } - - public void setSelf(SelfDependStrategy self) { - this.self = self; - } - - public String getOuter() { - return outer; - } - - public void setOuter(String outer) { - this.outer = outer; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ErrorCommand.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ErrorCommand.java deleted file mode 100644 index 2af75329a3..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ErrorCommand.java +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.enums.*; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; - -import java.util.Date; - -/** - * command - */ -@TableName("t_escheduler_error_command") -public class ErrorCommand { - - /** - * id - */ - @TableId(value="id", type = IdType.INPUT) - private int id; - - /** - * command type - */ - private CommandType commandType; - - /** - * process definition id - */ - private int processDefinitionId; - - /** - * executor id - */ - private int executorId; - - /** - * command parameter, format json - */ - private String commandParam; - - /** - * task depend type - */ - private TaskDependType taskDependType; - - /** - * failure strategy - */ - private FailureStrategy failureStrategy; - - /** - * warning type - */ - private WarningType warningType; - - /** - * warning group id - */ - private Integer warningGroupId; - - /** - * schedule time - */ - private Date scheduleTime; - - /** - * start time - */ - private Date startTime; - - /** - * process instance priority - */ - private Priority processInstancePriority; - - /** - * update time - */ - private Date updateTime; - - /** - * 执行信息 - */ - private String message; - - /** - * worker group id - */ - private int workerGroupId; - - public ErrorCommand(){} - - public 
ErrorCommand(Command command, String message){ - this.id = command.getId(); - this.commandType = command.getCommandType(); - this.executorId = command.getExecutorId(); - this.processDefinitionId = command.getProcessDefinitionId(); - this.commandParam = command.getCommandParam(); - this.warningType = command.getWarningType(); - this.warningGroupId = command.getWarningGroupId(); - this.scheduleTime = command.getScheduleTime(); - this.taskDependType = command.getTaskDependType(); - this.failureStrategy = command.getFailureStrategy(); - this.startTime = command.getStartTime(); - this.updateTime = command.getUpdateTime(); - this.processInstancePriority = command.getProcessInstancePriority(); - this.message = message; - } - - public ErrorCommand( - CommandType commandType, - TaskDependType taskDependType, - FailureStrategy failureStrategy, - int executorId, - int processDefinitionId, - String commandParam, - WarningType warningType, - int warningGroupId, - Date scheduleTime, - Priority processInstancePriority, - String message){ - this.commandType = commandType; - this.executorId = executorId; - this.processDefinitionId = processDefinitionId; - this.commandParam = commandParam; - this.warningType = warningType; - this.warningGroupId = warningGroupId; - this.scheduleTime = scheduleTime; - this.taskDependType = taskDependType; - this.failureStrategy = failureStrategy; - this.startTime = new Date(); - this.updateTime = new Date(); - this.processInstancePriority = processInstancePriority; - this.message = message; - } - - - public TaskDependType getTaskDependType() { - return taskDependType; - } - - public void setTaskDependType(TaskDependType taskDependType) { - this.taskDependType = taskDependType; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public CommandType getCommandType() { - return commandType; - } - - public void setCommandType(CommandType commandType) { - this.commandType = commandType; - } - - public int 
getProcessDefinitionId() { - return processDefinitionId; - } - - public void setProcessDefinitionId(int processDefinitionId) { - this.processDefinitionId = processDefinitionId; - } - - - public FailureStrategy getFailureStrategy() { - return failureStrategy; - } - - public void setFailureStrategy(FailureStrategy failureStrategy) { - this.failureStrategy = failureStrategy; - } - - public void setCommandParam(String commandParam) { - this.commandParam = commandParam; - } - - public String getCommandParam() { - return commandParam; - } - - public WarningType getWarningType() { - return warningType; - } - - public void setWarningType(WarningType warningType) { - this.warningType = warningType; - } - - public Integer getWarningGroupId() { - return warningGroupId; - } - - public void setWarningGroupId(Integer warningGroupId) { - this.warningGroupId = warningGroupId; - } - - public Date getScheduleTime() { - return scheduleTime; - } - - public void setScheduleTime(Date scheduleTime) { - this.scheduleTime = scheduleTime; - } - - public Date getStartTime() { - return startTime; - } - - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - public int getExecutorId() { - return executorId; - } - - public void setExecutorId(int executorId) { - this.executorId = executorId; - } - - public Priority getProcessInstancePriority() { - return processInstancePriority; - } - - public void setProcessInstancePriority(Priority processInstancePriority) { - this.processInstancePriority = processInstancePriority; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - public int getWorkerGroupId() { - return workerGroupId; - } - - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; - } - - @Override - public String toString() { - return "Command{" + - "id=" + id + - ", commandType=" + commandType + - ", processDefinitionId=" + 
processDefinitionId + - ", executorId=" + executorId + - ", commandParam='" + commandParam + '\'' + - ", taskDependType=" + taskDependType + - ", failureStrategy=" + failureStrategy + - ", warningType=" + warningType + - ", warningGroupId=" + warningGroupId + - ", scheduleTime=" + scheduleTime + - ", startTime=" + startTime + - ", processInstancePriority=" + processInstancePriority + - ", updateTime=" + updateTime + - ", message=" + message + - '}'; - } - - public String getMessage() { - return message; - } - - public void setMessage(String message) { - this.message = message; - } - - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ExecuteStatusCount.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ExecuteStatusCount.java deleted file mode 100644 index 9b224e3913..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ExecuteStatusCount.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - - -import cn.escheduler.common.enums.ExecutionStatus; - -/** - * count execute state - * - */ -public class ExecuteStatusCount { - - /** - * execution state - */ - private ExecutionStatus state; - - /** - * count for state - */ - private int count; - - public ExecutionStatus getExecutionStatus() { - return state; - } - - public void setExecutionStatus(ExecutionStatus executionStatus) { - this.state = executionStatus; - } - - public int getCount() { - return count; - } - - public void setCount(int count) { - this.count = count; - } - - @Override - public String toString() { - return "ExecuteStatusCount{" + - "state=" + state + - ", count=" + count + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/MonitorRecord.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/MonitorRecord.java deleted file mode 100644 index be3adc76f1..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/MonitorRecord.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import java.util.Date; - -/** - * monitor record for database - */ -public class MonitorRecord { - - /** - * is normal or not , 1: normal - */ - private int state; - - /** - * max connections - */ - private long maxConnections; - - /** - * max used connections - */ - private long maxUsedConnections; - - /** - * threads connections - */ - private long threadsConnections; - - /** - * threads running connections - */ - private long threadsRunningConnections; - - /** - * start date - */ - private Date date; - - public int getState() { - return state; - } - - public void setState(int state) { - this.state = state; - } - - public long getMaxConnections() { - return maxConnections; - } - - public void setMaxConnections(long maxConnections) { - this.maxConnections = maxConnections; - } - - public long getMaxUsedConnections() { - return maxUsedConnections; - } - - public void setMaxUsedConnections(long maxUsedConnections) { - this.maxUsedConnections = maxUsedConnections; - } - - public long getThreadsConnections() { - return threadsConnections; - } - - public void setThreadsConnections(long threadsConnections) { - this.threadsConnections = threadsConnections; - } - - public long getThreadsRunningConnections() { - return threadsRunningConnections; - } - - public void setThreadsRunningConnections(long threadsRunningConnections) { - this.threadsRunningConnections = threadsRunningConnections; - } - - public Date getDate() { - return date; - } - - public void setDate(Date date) { - this.date = date; - } - - @Override - public String toString() { - return "MonitorRecord{" + - "state=" + state + - ", maxConnections=" + maxConnections + - ", maxUsedConnections=" + maxUsedConnections + - ", threadsConnections=" + threadsConnections + - ", threadsRunningConnections=" + threadsRunningConnections + - ", date=" + date + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessData.java 
b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessData.java deleted file mode 100644 index e292b71e97..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessData.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.utils.CollectionUtils; - -import java.util.List; - -/** - * definition json data structure - */ -public class ProcessData { - /** - * task list - */ - private List tasks; - - /** - * global parameters - */ - private List globalParams; - - - private int timeout; - - private int tenantId; - - - public ProcessData() { - } - - /** - * - * @param tasks - * @param globalParams - */ - public ProcessData(List tasks, List globalParams) { - this.tasks = tasks; - this.globalParams = globalParams; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - - if (o == null || getClass() != o.getClass()) { - return false; - } - - ProcessData that = (ProcessData) o; - - return CollectionUtils.equalLists(tasks, that.tasks) && - CollectionUtils.equalLists(globalParams, that.globalParams); - } - - public List getTasks() { - return tasks; - } - - public void setTasks(List tasks) { - this.tasks = tasks; - } - - public List getGlobalParams() { - return globalParams; - } - - public void setGlobalParams(List globalParams) { - this.globalParams = globalParams; - } - - public int getTimeout() { - return timeout; - } - - public void setTimeout(int timeout) { - this.timeout = timeout; - } - - public int getTenantId() { - return tenantId; - } - - public void setTenantId(int tenantId) { - this.tenantId = tenantId; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessDefinition.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessDefinition.java deleted file mode 100644 index c171c7c125..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessDefinition.java +++ /dev/null @@ -1,384 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.enums.*; -import cn.escheduler.common.process.Property; -import com.alibaba.fastjson.JSONObject; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import com.baomidou.mybatisplus.core.toolkit.StringUtils; -import lombok.Data; - -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - - -/** - * process definition - */ -@Data -@TableName("t_escheduler_process_definition") -public class ProcessDefinition { - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * name - */ - private String name; - - /** - * version - */ - private int version; - - /** - * release state : online/offline - */ - private ReleaseState releaseState; - - /** - * project id - */ - private int projectId; - - /** - * definition json string - */ - private String processDefinitionJson; - - /** - * description - */ - private String description; - - /** - * user defined parameters - */ - private String globalParams; - - /** - * user defined parameter list - */ - @TableField(exist=false) - private 
List globalParamList; - - /** - * user define parameter map - */ - @TableField(exist=false) - private Map globalParamMap; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - /** - * process is valid: yes/no - */ - private Flag flag; - - /** - * process user id - */ - private int userId; - - /** - * user name - */ - @TableField(exist = false) - private String userName; - - /** - * project name - */ - @TableField(exist = false) - private String projectName; - - /** - * locations array for web - */ - private String locations; - - /** - * connects array for web - */ - private String connects; - - /** - * receivers - */ - private String receivers; - - /** - * receivers cc - */ - private String receiversCc; - - /** - * schedule release state : online/offline - */ - @TableField(exist=false) - private ReleaseState scheduleReleaseState; - - /** - * process warning time out. unit: minute - */ - private int timeout; - - /** - * tenant id - */ - private int tenantId; - - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public int getVersion() { - return version; - } - - public void setVersion(int version) { - this.version = version; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public ReleaseState getReleaseState() { - return releaseState; - } - - public void setReleaseState(ReleaseState releaseState) { - this.releaseState = releaseState; - } - - public String getProcessDefinitionJson() { - return processDefinitionJson; - } - - public void setProcessDefinitionJson(String processDefinitionJson) { - this.processDefinitionJson = processDefinitionJson; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public int getProjectId() { - return projectId; - } - - public void setProjectId(int projectId) { - 
this.projectId = projectId; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - public Flag getFlag() { - return flag; - } - - public void setFlag(Flag flag) { - this.flag = flag; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - public String getProjectName() { - return projectName; - } - - public void setProjectName(String projectName) { - this.projectName = projectName; - } - - - public String getGlobalParams() { - return globalParams; - } - - public void setGlobalParams(String globalParams) { - this.globalParamList = JSONObject.parseArray(globalParams, Property.class); - this.globalParams = globalParams; - } - - public List getGlobalParamList() { - return globalParamList; - } - - public void setGlobalParamList(List globalParamList) { - this.globalParams = JSONObject.toJSONString(globalParamList); - this.globalParamList = globalParamList; - } - - public Map getGlobalParamMap() { - List propList; - - if (globalParamMap == null && StringUtils.isNotEmpty(globalParams)) { - propList = JSONObject.parseArray(globalParams, Property.class); - globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); - } - - return globalParamMap; - } - - public void setGlobalParamMap(Map globalParamMap) { - this.globalParamMap = globalParamMap; - } - - public String getLocations() { - return locations; - } - - public void setLocations(String locations) { - this.locations = locations; - } - - public String getConnects() { - return connects; - } - - public void setConnects(String connects) { - this.connects = connects; - } - - public String getReceivers() { - return receivers; - } - - public void setReceivers(String receivers) { - 
this.receivers = receivers; - } - - public String getReceiversCc() { - return receiversCc; - } - - public void setReceiversCc(String receiversCc) { - this.receiversCc = receiversCc; - } - - public ReleaseState getScheduleReleaseState() { - return scheduleReleaseState; - } - - public void setScheduleReleaseState(ReleaseState scheduleReleaseState) { - this.scheduleReleaseState = scheduleReleaseState; - } - - public int getTimeout() { - return timeout; - } - - public void setTimeout(int timeout) { - this.timeout = timeout; - } - - @Override - public String toString() { - return "ProcessDefinition{" + - "id=" + id + - ", name='" + name + '\'' + - ", version=" + version + - ", releaseState=" + releaseState + - ", projectId=" + projectId + - ", processDefinitionJson='" + processDefinitionJson + '\'' + - ", globalParams='" + globalParams + '\'' + - ", globalParamList=" + globalParamList + - ", globalParamMap=" + globalParamMap + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - ", flag=" + flag + - ", userId=" + userId + - ", userName='" + userName + '\'' + - ", projectName='" + projectName + '\'' + - ", locations='" + locations + '\'' + - ", connects='" + connects + '\'' + - ", receivers='" + receivers + '\'' + - ", receiversCc='" + receiversCc + '\'' + - ", scheduleReleaseState=" + scheduleReleaseState + - ", timeout=" + timeout + - ", tenantId=" + tenantId + - '}'; - } - - public int getTenantId() { - return tenantId; - } - - public void setTenantId(int tenantId) { - this.tenantId = tenantId; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstance.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstance.java deleted file mode 100644 index 7af74ac06a..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstance.java +++ 
/dev/null @@ -1,620 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.enums.*; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import com.baomidou.mybatisplus.core.toolkit.StringUtils; -import lombok.Data; - -import java.util.Date; - -/** - * process instance - */ -@Data -@TableName("t_escheduler_process_instance") -public class ProcessInstance { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - /** - * process definition id - */ - private int processDefinitionId; - /** - * process state - */ - private ExecutionStatus state; - /** - * recovery flag for failover - */ - private Flag recovery; - /** - * start time - */ - private Date startTime; - - /** - * end time - */ - private Date endTime; - - /** - * run time - */ - private int runTimes; - - /** - * name - */ - private String name; - - /** - * host - */ - private String host; - - /** - * process definition structure - */ - @TableField(exist = false) - private ProcessDefinition processDefinition; - 
/** - * process command type - */ - private CommandType commandType; - - /** - * command parameters - */ - private String commandParam; - - /** - * node depend type - */ - private TaskDependType taskDependType; - - /** - * task max try times - */ - private int maxTryTimes; - - /** - * failure strategy when task failed. - */ - private FailureStrategy failureStrategy; - - /** - * warning type - */ - private WarningType warningType; - - /** - * warning group - */ - private Integer warningGroupId; - - /** - * schedule time - */ - private Date scheduleTime; - - /** - * command start time - */ - private Date commandStartTime; - - /** - * user define parameters string - */ - private String globalParams; - - /** - * process instance json - */ - private String processInstanceJson; - - /** - * executor id - */ - private int executorId; - - /** - * tenant code - */ - @TableField(exist = false) - private String tenantCode; - - /** - * queue - */ - @TableField(exist = false) - private String queue; - - /** - * process is sub process - */ - private Flag isSubProcess; - - /** - * task locations for web - */ - private String locations; - - /** - * task connects for web - */ - private String connects; - - /** - * history command - */ - private String historyCmd; - - /** - * depend processes schedule time - */ - private String dependenceScheduleTimes; - - /** - * process duration - * @return - */ - @TableField(exist = false) - private Long duration; - - /** - * process instance priority - */ - private Priority processInstancePriority; - - /** - * worker group id - */ - private int workerGroupId; - - /** - * process timeout for warning - */ - private int timeout; - - /** - * tenant id - */ - private int tenantId; - - /** - * worker group name. for api. 
- */ - @TableField(exist = false) - private String workerGroupName; - - /** - * receivers for api - */ - @TableField(exist = false) - private String receivers; - - /** - * receivers cc for api - */ - @TableField(exist = false) - private String receiversCc; - - public ProcessInstance(){ - - } - - /** - * set the process name with process define version and timestamp - * @param processDefinition - */ - public ProcessInstance(ProcessDefinition processDefinition){ - this.processDefinition = processDefinition; - this.name = processDefinition.getName() + "-" + - processDefinition.getVersion() + "-" + - System.currentTimeMillis(); - } - - public ProcessDefinition getProcessDefinition() { - return processDefinition; - } - - public void setProcessDefinition(ProcessDefinition processDefinition) { - this.processDefinition = processDefinition; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getProcessDefinitionId() { - return processDefinitionId; - } - - public void setProcessDefinitionId(int processDefinitionId) { - this.processDefinitionId = processDefinitionId; - } - - public ExecutionStatus getState() { - return state; - } - - public void setState(ExecutionStatus state) { - this.state = state; - } - - public Flag getRecovery() { - return recovery; - } - - public void setRecovery(Flag recovery) { - this.recovery = recovery; - } - - public Date getStartTime() { - return startTime; - } - - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - public Date getEndTime() { - return endTime; - } - - public void setEndTime(Date endTime) { - this.endTime = endTime; - } - - public int getRunTimes() { - return runTimes; - } - - public void setRunTimes(int runTimes) { - this.runTimes = runTimes; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - - public String getHost() { - return host; - } - - public void setHost(String host) 
{ - this.host = host; - } - - - public CommandType getCommandType() { - return commandType; - } - - public void setCommandType(CommandType commandType) { - this.commandType = commandType; - } - - public String getCommandParam() { - return commandParam; - } - - public void setCommandParam(String commandParam) { - this.commandParam = commandParam; - } - - public TaskDependType getTaskDependType() { - return taskDependType; - } - - public void setTaskDependType(TaskDependType taskDependType) { - this.taskDependType = taskDependType; - } - - - public int getMaxTryTimes() { - return maxTryTimes; - } - - public void setMaxTryTimes(int maxTryTimes) { - this.maxTryTimes = maxTryTimes; - } - - public FailureStrategy getFailureStrategy() { - return failureStrategy; - } - - public void setFailureStrategy(FailureStrategy failureStrategy) { - this.failureStrategy = failureStrategy; - } - - - public boolean IsProcessInstanceStop(){ - return this.state.typeIsFinished(); - } - - public WarningType getWarningType() { - return warningType; - } - - public void setWarningType(WarningType warningType) { - this.warningType = warningType; - } - - public Integer getWarningGroupId() { - return warningGroupId; - } - - public void setWarningGroupId(Integer warningGroupId) { - this.warningGroupId = warningGroupId; - } - - public Date getScheduleTime() { - return scheduleTime; - } - - public void setScheduleTime(Date scheduleTime) { - this.scheduleTime = scheduleTime; - } - - public Date getCommandStartTime() { - return commandStartTime; - } - - public void setCommandStartTime(Date commandStartTime) { - this.commandStartTime = commandStartTime; - } - - public String getGlobalParams() { - return globalParams; - } - - public void setGlobalParams(String globalParams) { - this.globalParams = globalParams; - } - - public String getProcessInstanceJson() { - return processInstanceJson; - } - - public void setProcessInstanceJson(String processInstanceJson) { - this.processInstanceJson = 
processInstanceJson; - } - - public String getTenantCode() { - return tenantCode; - } - - public void setTenantCode(String tenantCode) { - this.tenantCode = tenantCode; - } - - public String getQueue() { - return queue; - } - - public void setQueue(String queue) { - this.queue = queue; - } - - public int getExecutorId() { - return executorId; - } - - public void setExecutorId(int executorId) { - this.executorId = executorId; - } - - - public Flag getIsSubProcess() { - return isSubProcess; - } - - public void setIsSubProcess(Flag isSubProcess) { - this.isSubProcess = isSubProcess; - } - - public Priority getProcessInstancePriority() { - return processInstancePriority; - } - - public void setProcessInstancePriority(Priority processInstancePriority) { - this.processInstancePriority = processInstancePriority; - } - public String getLocations() { - return locations; - } - - public void setLocations(String locations) { - this.locations = locations; - } - - public String getConnects() { - return connects; - } - - public void setConnects(String connects) { - this.connects = connects; - } - - public String getHistoryCmd() { - return historyCmd; - } - - public void setHistoryCmd(String historyCmd) { - this.historyCmd = historyCmd; - } - - /** - * add command to history - * @param cmd - */ - public void addHistoryCmd(CommandType cmd){ - if(StringUtils.isNotEmpty(this.historyCmd)){ - this.historyCmd = String.format("%s,%s", this.historyCmd, cmd.toString()); - }else{ - this.historyCmd = cmd.toString(); - } - } - - /** - * check this process is start complement data - */ - public Boolean isComplementData(){ - if(!StringUtils.isNotEmpty(this.historyCmd)){ - return false; - } - return historyCmd.startsWith(CommandType.COMPLEMENT_DATA.toString()); - } - - /** - * get current command type, - * if start with complement data,return complement - */ - public CommandType getCmdTypeIfComplement(){ - if(isComplementData()){ - return CommandType.COMPLEMENT_DATA; - } - return commandType; - 
} - - public String getDependenceScheduleTimes() { - return dependenceScheduleTimes; - } - - public void setDependenceScheduleTimes(String dependenceScheduleTimes) { - this.dependenceScheduleTimes = dependenceScheduleTimes; - } - - public Long getDuration() { - return duration; - } - - public void setDuration(Long duration) { - this.duration = duration; - } - - public int getWorkerGroupId() { - return workerGroupId; - } - - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; - } - - public int getTimeout() { - return timeout; - } - - public void setTimeout(int timeout) { - this.timeout = timeout; - } - - - public void setTenantId(int tenantId) { - this.tenantId = tenantId; - } - - public int getTenantId() { - return this.tenantId ; - } - - public String getWorkerGroupName() { - return workerGroupName; - } - - public void setWorkerGroupName(String workerGroupName) { - this.workerGroupName = workerGroupName; - } - - public String getReceivers() { - return receivers; - } - - public void setReceivers(String receivers) { - this.receivers = receivers; - } - - public String getReceiversCc() { - return receiversCc; - } - - public void setReceiversCc(String receiversCc) { - this.receiversCc = receiversCc; - } - - @Override - public String toString() { - return "ProcessInstance{" + - "id=" + id + - ", processDefinitionId=" + processDefinitionId + - ", state=" + state + - ", recovery=" + recovery + - ", startTime=" + startTime + - ", endTime=" + endTime + - ", runTimes=" + runTimes + - ", name='" + name + '\'' + - ", host='" + host + '\'' + - ", processDefinition=" + processDefinition + - ", commandType=" + commandType + - ", commandParam='" + commandParam + '\'' + - ", taskDependType=" + taskDependType + - ", maxTryTimes=" + maxTryTimes + - ", failureStrategy=" + failureStrategy + - ", warningType=" + warningType + - ", warningGroupId=" + warningGroupId + - ", scheduleTime=" + scheduleTime + - ", commandStartTime=" + commandStartTime + - 
", globalParams='" + globalParams + '\'' + - ", processInstanceJson='" + processInstanceJson + '\'' + - ", executorId=" + executorId + - ", tenantCode='" + tenantCode + '\'' + - ", queue='" + queue + '\'' + - ", isSubProcess=" + isSubProcess + - ", locations='" + locations + '\'' + - ", connects='" + connects + '\'' + - ", historyCmd='" + historyCmd + '\'' + - ", dependenceScheduleTimes='" + dependenceScheduleTimes + '\'' + - ", duration=" + duration + - ", processInstancePriority=" + processInstancePriority + - ", workerGroupId=" + workerGroupId + - ", timeout=" + timeout + - ", tenantId=" + tenantId + - ", workerGroupName='" + workerGroupName + '\'' + - ", receivers='" + receivers + '\'' + - ", receiversCc='" + receiversCc + '\'' + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstanceMap.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstanceMap.java deleted file mode 100644 index 2951598c8b..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProcessInstanceMap.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -/** - * process instance map - */ -@Data -@TableName("t_escheduler_relation_process_instance") -public class ProcessInstanceMap { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * parent process instance id - */ - private int parentProcessInstanceId; - - /** - * parent task instance id - */ - private int parentTaskInstanceId; - - /** - * process instance id - */ - private int processInstanceId; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getParentProcessInstanceId() { - return parentProcessInstanceId; - } - - public void setParentProcessInstanceId(int parentProcessInstanceId) { - this.parentProcessInstanceId = parentProcessInstanceId; - } - - public int getParentTaskInstanceId() { - return parentTaskInstanceId; - } - - public void setParentTaskInstanceId(int parentTaskInstanceId) { - this.parentTaskInstanceId = parentTaskInstanceId; - } - - public int getProcessInstanceId() { - return processInstanceId; - } - - public void setProcessInstanceId(int processInstanceId) { - this.processInstanceId = processInstanceId; - } - - @Override - public String toString() { - return "ProcessInstanceMap{" + - "id=" + id + - ", parentProcessInstanceId=" + parentProcessInstanceId + - ", parentTaskInstanceId=" + parentTaskInstanceId + - ", processInstanceId=" + processInstanceId + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Project.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Project.java deleted file mode 100644 index 35bebac39b..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Project.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under 
one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * project - */ -@Data -@TableName("t_escheduler_project") -public class Project { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * user id - */ - @TableField("user_id") - private int userId; - - /** - * user name - */ - @TableField(exist=false) - private String userName; - - /** - * project name - */ - @TableField("name") - private String name; - - /** - * project description - */ - @TableField("`desc`") - private String desc; - - /** - * create time - */ - @TableField("`create_time`") - private Date createTime; - - /** - * update time - */ - @TableField("`update_time`") - private Date updateTime; - - /** - * permission - */ - @TableField(exist=false) - private int perm; - - /** - * process define count - */ - @TableField(exist=false) - private int defCount; - - /** - * process instance running count - */ - @TableField(exist=false) - private int instRunningCount; - - public int 
getDefCount() { - return defCount; - } - - public void setDefCount(int defCount) { - this.defCount = defCount; - } - - public int getInstRunningCount() { - return instRunningCount; - } - - public void setInstRunningCount(int instRunningCount) { - this.instRunningCount = instRunningCount; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getDesc() { - return desc; - } - - public void setDesc(String desc) { - this.desc = desc; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - public int getPerm() { - return perm; - } - - public void setPerm(int perm) { - this.perm = perm; - } - @Override - public String toString() { - return "Project{" + - "id=" + id + - ", userId=" + userId + - ", userName='" + userName + '\'' + - ", name='" + name + '\'' + - ", desc='" + desc + '\'' + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } - - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - Project project = (Project) o; - - if (id != project.id) { - return false; - } - return name.equals(project.name); - - } - - @Override - public int hashCode() { - int result = id; - result = 31 * result + name.hashCode(); - return result; - } -} diff --git 
a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProjectUser.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProjectUser.java deleted file mode 100644 index cd44f9c1c6..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ProjectUser.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -@Data -@TableName("t_escheduler_relation_project_user") -public class ProjectUser { - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - @TableField("user_id") - private int userId; - - @TableField("project_id") - private int projectId; - - /** - * project name - */ - @TableField(exist = false) - private String projectName; - - /** - * user name - */ - @TableField(exist = false) - private String userName; - - /** - * permission - */ - private int perm; - - @TableField("create_time") - private Date createTime; - - @TableField("update_time") - private Date updateTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public int getProjectId() { - return projectId; - } - - public void setProjectId(int projectId) { - this.projectId = projectId; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - public String getProjectName() { - return projectName; - } - - public void setProjectName(String projectName) { - this.projectName = projectName; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - public int getPerm() { - return perm; - } - - public void setPerm(int perm) { - this.perm = perm; - } - @Override - public String toString() { - return 
"ProjectUser{" + - "id=" + id + - ", projectId=" + projectId + - ", projectName='" + projectName + '\'' + - ", userId=" + userId + - ", userName='" + userName + '\'' + - ", perm=" + perm + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Queue.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Queue.java deleted file mode 100644 index feac009a84..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Queue.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * queue - */ -@Data -@TableName("t_escheduler_queue") -public class Queue { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - /** - * queue name - */ - private String queueName; - /** - * yarn queue name - */ - private String queue; - - /** - * create time - */ - private Date createTime; - /** - * update time - */ - private Date updateTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getQueueName() { - return queueName; - } - - public void setQueueName(String queueName) { - this.queueName = queueName; - } - - public String getQueue() { - return queue; - } - - public void setQueue(String queue) { - this.queue = queue; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - @Override - public String toString() { - return "Queue{" + - "id=" + id + - ", queueName='" + queueName + '\'' + - ", queue='" + queue + '\'' + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Resource.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Resource.java deleted file mode 100644 index a631753ad9..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Resource.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - - -import cn.escheduler.common.enums.ResourceType; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -@Data -@TableName("t_escheduler_resources") -public class Resource { - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * resource alias - */ - private String alias; - - /** - * description - */ - private String desc; - - /** - * file alias - */ - private String fileName; - - /** - * user id - */ - private int userId; - - /** - * resource type - */ - private ResourceType type; - - /** - * resource size - */ - private long size; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - public Resource() { - } - - public Resource(int id, String alias, String fileName, String desc, int userId, - ResourceType type, long size, - Date createTime, Date updateTime) { - this.id = id; - this.alias = alias; - this.fileName = fileName; - this.desc = desc; - this.userId = userId; - this.type = type; - this.size = size; - this.createTime = createTime; - this.updateTime = updateTime; - } - - public 
Resource(String alias, String fileName, String desc, int userId, ResourceType type, long size, Date createTime, Date updateTime) { - this.alias = alias; - this.fileName = fileName; - this.desc = desc; - this.userId = userId; - this.type = type; - this.size = size; - this.createTime = createTime; - this.updateTime = updateTime; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getAlias() { - return alias; - } - - public void setAlias(String alias) { - this.alias = alias; - } - - public String getFileName() { - return fileName; - } - - public void setFileName(String fileName) { - this.fileName = fileName; - } - - public String getDesc() { - return desc; - } - - public void setDesc(String desc) { - this.desc = desc; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - - public ResourceType getType() { - return type; - } - - public void setType(ResourceType type) { - this.type = type; - } - - public long getSize() { - return size; - } - - public void setSize(long size) { - this.size = size; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - @Override - public String toString() { - return "Resource{" + - "id=" + id + - ", alias='" + alias + '\'' + - ", fileName='" + fileName + '\'' + - ", desc='" + desc + '\'' + - ", userId=" + userId + - ", type=" + type + - ", size=" + size + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - Resource resource = (Resource) o; - - if (id != resource.id) { - return false; - } 
- return alias.equals(resource.alias); - - } - - @Override - public int hashCode() { - int result = id; - result = 31 * result + alias.hashCode(); - return result; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ResourcesUser.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ResourcesUser.java deleted file mode 100644 index 92ab7955bf..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ResourcesUser.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * resource user relation - */ -@Data -@TableName("t_escheduler_relation_resources_user") -public class ResourcesUser { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * user id - */ - private int userId; - - /** - * resource id - */ - private int resourcesId; - - /** - * permission - */ - private int perm; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public int getResourcesId() { - return resourcesId; - } - - public void setResourcesId(int resourcesId) { - this.resourcesId = resourcesId; - } - - public int getPerm() { - return perm; - } - - public void setPerm(int perm) { - this.perm = perm; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - @Override - public String toString() { - return "ResourcesUser{" + - "id=" + id + - ", userId=" + userId + - ", resourcesId=" + resourcesId + - ", perm=" + perm + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Schedule.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Schedule.java deleted file mode 100644 index 069ebc3ae6..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Schedule.java 
+++ /dev/null @@ -1,307 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.enums.*; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * schedule - * - */ -@Data -@TableName("t_escheduler_schedules") -public class Schedule { - - @TableId(value="id", type=IdType.AUTO) - private int id; - /** - * process definition id - */ - private int processDefinitionId; - - /** - * process definition name - */ - @TableField(exist = false) - private String processDefinitionName; - - /** - * project name - */ - @TableField(exist = false) - private String projectName; - - /** - * schedule description - */ - @TableField(exist = false) - private String definitionDescription; - - /** - * schedule start time - */ - private Date startTime; - - /** - * schedule end time - */ - private Date endTime; - - /** - * crontab expression - */ - private String crontab; - - /** - * failure strategy - */ - private FailureStrategy failureStrategy; - - /** - * 
warning type - */ - private WarningType warningType; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - /** - * created user id - */ - private int userId; - - /** - * created user name - */ - @TableField(exist = false) - private String userName; - - /** - * release state - */ - private ReleaseState releaseState; - - /** - * warning group id - */ - private int warningGroupId; - - - /** - * process instance priority - */ - private Priority processInstancePriority; - - /** - * worker group id - */ - private int workerGroupId; - - public int getWarningGroupId() { - return warningGroupId; - } - - public void setWarningGroupId(int warningGroupId) { - this.warningGroupId = warningGroupId; - } - - - - public Schedule() { - } - - public String getProjectName() { - return projectName; - } - - public void setProjectName(String projectName) { - this.projectName = projectName; - } - - - - public Date getStartTime() { - - return startTime; - } - - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - public Date getEndTime() { - return endTime; - } - - public void setEndTime(Date endTime) { - this.endTime = endTime; - } - - public String getCrontab() { - return crontab; - } - - public void setCrontab(String crontab) { - this.crontab = crontab; - } - - public FailureStrategy getFailureStrategy() { - return failureStrategy; - } - - public void setFailureStrategy(FailureStrategy failureStrategy) { - this.failureStrategy = failureStrategy; - } - - public WarningType getWarningType() { - return warningType; - } - - public void setWarningType(WarningType warningType) { - this.warningType = warningType; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - - public ReleaseState getReleaseState() { - return releaseState; - } - - public void setReleaseState(ReleaseState releaseState) { - 
this.releaseState = releaseState; - } - - - - public int getProcessDefinitionId() { - return processDefinitionId; - } - - public void setProcessDefinitionId(int processDefinitionId) { - this.processDefinitionId = processDefinitionId; - } - - public String getProcessDefinitionName() { - return processDefinitionName; - } - - public void setProcessDefinitionName(String processDefinitionName) { - this.processDefinitionName = processDefinitionName; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public Priority getProcessInstancePriority() { - return processInstancePriority; - } - - public void setProcessInstancePriority(Priority processInstancePriority) { - this.processInstancePriority = processInstancePriority; - } - - - public int getWorkerGroupId() { - return workerGroupId; - } - - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; - } - - @Override - public String toString() { - return "Schedule{" + - "id=" + id + - ", processDefinitionId=" + processDefinitionId + - ", processDefinitionName='" + processDefinitionName + '\'' + - ", projectName='" + projectName + '\'' + - ", description='" + definitionDescription + '\'' + - ", startTime=" + startTime + - ", endTime=" + endTime + - ", crontab='" + crontab + '\'' + - ", failureStrategy=" + failureStrategy + - ", warningType=" + warningType + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - ", userId=" + userId + - ", userName='" + userName + '\'' + - ", releaseState=" + releaseState + - ", warningGroupId=" + 
warningGroupId + - ", processInstancePriority=" + processInstancePriority + - ", workerGroupId=" + workerGroupId + - '}'; - } - - public String getDefinitionDescription() { - return definitionDescription; - } - - public void setDefinitionDescription(String definitionDescription) { - this.definitionDescription = definitionDescription; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Session.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Session.java deleted file mode 100644 index 90a2685d68..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Session.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * session - */ -@Data -@TableName("t_escheduler_session") -public class Session { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private String id; - - /** - * user id - */ - private int userId; - - /** - * last login time - */ - private Date lastLoginTime; - - /** - * user login ip - */ - private String ip; - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public String getIp() { - return ip; - } - - public void setIp(String ip) { - this.ip = ip; - } - - public Date getLastLoginTime() { - return lastLoginTime; - } - - public void setLastLoginTime(Date lastLoginTime) { - this.lastLoginTime = lastLoginTime; - } - - @Override - public String toString() { - return "Session{" + - "id=" + id + - ", userId=" + userId + - ", ip='" + ip + '\'' + - ", lastLoginTime=" + lastLoginTime + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/TaskInstance.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/TaskInstance.java deleted file mode 100644 index 1f65a18f39..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/TaskInstance.java +++ /dev/null @@ -1,515 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.common.enums.*; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * task instance - */ -@Data -@TableName("t_escheduler_task_instance") -public class TaskInstance { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * task name - */ - private String name; - - /** - * task type - */ - private String taskType; - - /** - * process definition id - */ - private int processDefinitionId; - - /** - * process instance id - */ - private int processInstanceId; - - /** - * process instance name - */ - private String processInstanceName; - - /** - * task json - */ - private String taskJson; - - /** - * state - */ - private ExecutionStatus state; - - /** - * task submit time - */ - private Date submitTime; - - /** - * task start time - */ - private Date startTime; - - /** - * task end time - */ - private Date endTime; - - /** - * task host - */ - private String host; - - /** - * task shell execute path and the resource down from hdfs - * default path: $base_run_dir/processInstanceId/taskInstanceId/retryTimes - */ - private String executePath; - - /** - * task log path - * default path: $base_run_dir/processInstanceId/taskInstanceId/retryTimes - */ - private String logPath; - - /** - * retry times - */ - private 
int retryTimes; - - /** - * alert flag - */ - private Flag alertFlag; - - /** - * run flag - */ - private Flag runFlag; - - /** - * process instance - */ - private ProcessInstance processInstance; - - /** - * process definition - */ - private ProcessDefinition processDefine; - - /** - * process id - */ - private int pid; - - /** - * appLink - */ - private String appLink; - - /** - * flag - */ - private Flag flag; - - /** - * dependency - */ - private String dependency; - - /** - * duration - * @return - */ - private Long duration; - - /** - * max retry times - * @return - */ - private int maxRetryTimes; - - /** - * task retry interval, unit: minute - * @return - */ - private int retryInterval; - - /** - * task intance priority - */ - private Priority taskInstancePriority; - - /** - * process intance priority - */ - private Priority processInstancePriority; - - /** - * dependent state - * @return - */ - private String dependentResult; - - - /** - * worker group id - * @return - */ - private int workerGroupId; - - - - public void init(String host,Date startTime,String executePath){ - this.host = host; - this.startTime = startTime; - this.executePath = executePath; - } - - - public ProcessInstance getProcessInstance() { - return processInstance; - } - - public void setProcessInstance(ProcessInstance processInstance) { - this.processInstance = processInstance; - } - - public ProcessDefinition getProcessDefine() { - return processDefine; - } - - public void setProcessDefine(ProcessDefinition processDefine) { - this.processDefine = processDefine; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getTaskType() { - return taskType; - } - - public void setTaskType(String taskType) { - this.taskType = taskType; - } - - public int getProcessDefinitionId() { - return processDefinitionId; - } - - public 
void setProcessDefinitionId(int processDefinitionId) { - this.processDefinitionId = processDefinitionId; - } - - public int getProcessInstanceId() { - return processInstanceId; - } - - public void setProcessInstanceId(int processInstanceId) { - this.processInstanceId = processInstanceId; - } - - public String getTaskJson() { - return taskJson; - } - - public void setTaskJson(String taskJson) { - this.taskJson = taskJson; - } - - public ExecutionStatus getState() { - return state; - } - - public void setState(ExecutionStatus state) { - this.state = state; - } - - public Date getSubmitTime() { - return submitTime; - } - - public void setSubmitTime(Date submitTime) { - this.submitTime = submitTime; - } - - public Date getStartTime() { - return startTime; - } - - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - public Date getEndTime() { - return endTime; - } - - public void setEndTime(Date endTime) { - this.endTime = endTime; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public String getExecutePath() { - return executePath; - } - - public void setExecutePath(String executePath) { - this.executePath = executePath; - } - - public String getLogPath() { - return logPath; - } - - public void setLogPath(String logPath) { - this.logPath = logPath; - } - - public Flag getAlertFlag() { - return alertFlag; - } - - public void setAlertFlag(Flag alertFlag) { - this.alertFlag = alertFlag; - } - - public int getRetryTimes() { - return retryTimes; - } - - public void setRetryTimes(int retryTimes) { - this.retryTimes = retryTimes; - } - - public Boolean isTaskSuccess(){ - return this.state == ExecutionStatus.SUCCESS; - } - - public int getPid() { - return pid; - } - - public void setPid(int pid) { - this.pid = pid; - } - - public String getAppLink() { - return appLink; - } - - public void setAppLink(String appLink) { - this.appLink = appLink; - } - - - public Boolean 
isSubProcess(){ - return TaskType.SUB_PROCESS.toString().equals(this.taskType.toUpperCase()); - } - - public String getDependency(){ - - if(this.dependency != null){ - return this.dependency; - } - TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class); - - return taskNode.getDependence(); - } - - public Flag getFlag() { - return flag; - } - - public void setFlag(Flag flag) { - this.flag = flag; - } - public String getProcessInstanceName() { - return processInstanceName; - } - - public void setProcessInstanceName(String processInstanceName) { - this.processInstanceName = processInstanceName; - } - - public Flag getRunFlag() { - return runFlag; - } - - public void setRunFlag(Flag runFlag) { - this.runFlag = runFlag; - } - - public Long getDuration() { - return duration; - } - - public void setDuration(Long duration) { - this.duration = duration; - } - - public int getMaxRetryTimes() { - return maxRetryTimes; - } - - public void setMaxRetryTimes(int maxRetryTimes) { - this.maxRetryTimes = maxRetryTimes; - } - - public int getRetryInterval() { - return retryInterval; - } - - public void setRetryInterval(int retryInterval) { - this.retryInterval = retryInterval; - } - - public Boolean isTaskComplete() { - - return this.getState().typeIsPause() - || this.getState().typeIsSuccess() - || this.getState().typeIsCancel() - || (this.getState().typeIsFailure() && !taskCanRetry()); - } - /** - * 判断是否可以重试 - * @return - */ - public boolean taskCanRetry() { - if(this.isSubProcess()){ - return false; - } - if(this.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE){ - return true; - }else { - return (this.getState().typeIsFailure() - && this.getRetryTimes() < this.getMaxRetryTimes()); - } - } - - public void setDependency(String dependency) { - this.dependency = dependency; - } - - public Priority getTaskInstancePriority() { - return taskInstancePriority; - } - - public void setTaskInstancePriority(Priority taskInstancePriority) { - this.taskInstancePriority = 
taskInstancePriority; - } - - public Priority getProcessInstancePriority() { - return processInstancePriority; - } - - public void setProcessInstancePriority(Priority processInstancePriority) { - this.processInstancePriority = processInstancePriority; - } - - public int getWorkerGroupId() { - return workerGroupId; - } - - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; - } - - @Override - public String toString() { - return "TaskInstance{" + - "id=" + id + - ", name='" + name + '\'' + - ", taskType='" + taskType + '\'' + - ", processDefinitionId=" + processDefinitionId + - ", processInstanceId=" + processInstanceId + - ", processInstanceName='" + processInstanceName + '\'' + - ", taskJson='" + taskJson + '\'' + - ", state=" + state + - ", submitTime=" + submitTime + - ", startTime=" + startTime + - ", endTime=" + endTime + - ", host='" + host + '\'' + - ", executePath='" + executePath + '\'' + - ", logPath='" + logPath + '\'' + - ", retryTimes=" + retryTimes + - ", alertFlag=" + alertFlag + - ", runFlag=" + runFlag + - ", processInstance=" + processInstance + - ", processDefine=" + processDefine + - ", pid=" + pid + - ", appLink='" + appLink + '\'' + - ", flag=" + flag + - ", dependency=" + dependency + - ", duration=" + duration + - ", maxRetryTimes=" + maxRetryTimes + - ", retryInterval=" + retryInterval + - ", taskInstancePriority=" + taskInstancePriority + - ", processInstancePriority=" + processInstancePriority + - ", workGroupId=" + workerGroupId + - '}'; - } - - public String getDependentResult() { - return dependentResult; - } - - public void setDependentResult(String dependentResult) { - this.dependentResult = dependentResult; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/TaskRecord.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/TaskRecord.java deleted file mode 100644 index 02b85dbf8b..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/TaskRecord.java +++ 
/dev/null @@ -1,256 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import java.util.Date; - -/** - * task record for qianfan - */ -public class TaskRecord { - - /** - * id - */ - private int id; - - /** - * process id - */ - private int procId; - - /** - * procedure name - */ - private String procName; - - /** - * procedure date - */ - private String procDate; - - /** - * start date - */ - private Date startTime; - - /** - * end date - */ - private Date endTime; - - /** - * result - */ - private String result; - - /** - * duration unit: second - */ - private int duration; - - /** - * note - */ - private String note; - - /** - * schema - */ - private String schema; - - /** - * job id - */ - private String jobId; - - - /** - * source tab - */ - private String sourceTab; - - /** - * source row count - */ - private Long sourceRowCount; - - /** - * target tab - */ - private String targetTab; - - /** - * target row count - */ - private Long targetRowCount; - - /** - * error code - */ - private String errorCode; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getProcId() { - return procId; - } - - public void 
setProcId(int procId) { - this.procId = procId; - } - - public String getProcName() { - return procName; - } - - public void setProcName(String procName) { - this.procName = procName; - } - - public String getProcDate() { - return procDate; - } - - public void setProcDate(String procDate) { - this.procDate = procDate; - } - - public Date getStartTime() { - return startTime; - } - - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - public Date getEndTime() { - return endTime; - } - - public void setEndTime(Date endTime) { - this.endTime = endTime; - } - - public String getResult() { - return result; - } - - public void setResult(String result) { - this.result = result; - } - - public int getDuration() { - return duration; - } - - public void setDuration(int duration) { - this.duration = duration; - } - - public String getNote() { - return note; - } - - public void setNote(String note) { - this.note = note; - } - - public String getSchema() { - return schema; - } - - public void setSchema(String schema) { - this.schema = schema; - } - - public String getJobId() { - return jobId; - } - - public void setJobId(String jobId) { - this.jobId = jobId; - } - - public String getSourceTab() { - return sourceTab; - } - - public void setSourceTab(String sourceTab) { - this.sourceTab = sourceTab; - } - - public Long getSourceRowCount() { - return sourceRowCount; - } - - public void setSourceRowCount(Long sourceRowCount) { - this.sourceRowCount = sourceRowCount; - } - - public String getTargetTab() { - return targetTab; - } - - public void setTargetTab(String targetTab) { - this.targetTab = targetTab; - } - - public Long getTargetRowCount() { - return targetRowCount; - } - - public void setTargetRowCount(Long targetRowCount) { - this.targetRowCount = targetRowCount; - } - - public String getErrorCode() { - return errorCode; - } - - public void setErrorCode(String errorCode) { - this.errorCode = errorCode; - } - - @Override - public String toString(){ 
- return "task record, id:" + id - +" proc id:" + procId - + " proc name:" + procName - + " proc date: " + procDate - + " start date:" + startTime - + " end date:" + endTime - + " result : " + result - + " duration : " + duration - + " note : " + note - + " schema : " + schema - + " job id : " + jobId - + " source table : " + sourceTab - + " source row count: " + sourceRowCount - + " target table : " + targetTab - + " target row count: " + targetRowCount - + " error code: " + errorCode - ; - } - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Tenant.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Tenant.java deleted file mode 100644 index f4db4d6068..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/Tenant.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * tenant - */ -@Data -@TableName("t_escheduler_tenant") -public class Tenant { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * tenant code - */ - private String tenantCode; - - /** - * tenant name - */ - private String tenantName; - - /** - * description - */ - private String description; - - /** - * queue id - */ - private int queueId; - - /** - * queue name - */ - @TableField(exist = false) - private String queueName; - - /** - * queue - */ - @TableField(exist = false) - private String queue; - - /** - * create time - */ - private Date createTime; - /** - * update time - */ - private Date updateTime; - - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getTenantCode() { - return tenantCode; - } - - public void setTenantCode(String tenantCode) { - this.tenantCode = tenantCode; - } - - public String getTenantName() { - return tenantName; - } - - public void setTenantName(String tenantName) { - this.tenantName = tenantName; - } - - public int getQueueId() { - return queueId; - } - - public void setQueueId(int queueId) { - this.queueId = queueId; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - public String getQueueName() { - return queueName; - } - - public void setQueueName(String queueName) { - this.queueName = queueName; - } - - public String getQueue() { - return queue; - } - - public void setQueue(String 
queue) { - this.queue = queue; - } - - @Override - public String toString() { - return "Tenant{" + - "id=" + id + - ", tenantCode='" + tenantCode + '\'' + - ", tenantName='" + tenantName + '\'' + - ", queueId=" + queueId + - ", queueName='" + queueName + '\'' + - ", queue='" + queue + '\'' + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/UDFUser.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/UDFUser.java deleted file mode 100644 index 2d8c6a4f87..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/UDFUser.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * udf user relation - */ -@Data -@TableName("t_escheduler_relation_udfs_user") -public class UDFUser { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * id - */ - private int userId; - - /** - * udf id - */ - private int udfId; - - /** - * permission - */ - private int perm; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public int getUdfId() { - return udfId; - } - - public void setUdfId(int udfId) { - this.udfId = udfId; - } - - public int getPerm() { - return perm; - } - - public void setPerm(int perm) { - this.perm = perm; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - @Override - public String toString() { - return "UDFUser{" + - "id=" + id + - ", userId=" + userId + - ", udfId=" + udfId + - ", perm=" + perm + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/UdfFunc.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/UdfFunc.java deleted file mode 100644 index c34f72f9e0..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/UdfFunc.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) 
under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import cn.escheduler.common.enums.UdfType; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * udf function - */ -@Data -@TableName("t_escheduler_udfs") -public class UdfFunc { - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - /** - * user id - */ - private int userId; - - /** - * udf function name - */ - private String funcName; - - /** - * udf class name - */ - private String className; - - /** - * udf argument types - */ - private String argTypes; - - /** - * udf data base - */ - private String database; - - /** - * udf description - */ - private String desc; - - /** - * resource id - */ - private int resourceId; - - /** - * resource name - */ - private String resourceName; - - /** - * udf function type: hive / spark - */ - private UdfType type; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getUserId() { - return userId; 
- } - - public void setUserId(int userId) { - this.userId = userId; - } - - public String getFuncName() { - return funcName; - } - - public void setFuncName(String funcName) { - this.funcName = funcName; - } - - public String getClassName() { - return className; - } - - public void setClassName(String className) { - this.className = className; - } - - public String getArgTypes() { - return argTypes; - } - - public void setArgTypes(String argTypes) { - this.argTypes = argTypes; - } - - public String getDatabase() { - return database; - } - - public void setDatabase(String database) { - this.database = database; - } - - public String getDesc() { - return desc; - } - - public void setDesc(String desc) { - this.desc = desc; - } - - public int getResourceId() { - return resourceId; - } - - public void setResourceId(int resourceId) { - this.resourceId = resourceId; - } - - public String getResourceName() { - return resourceName; - } - - public void setResourceName(String resourceName) { - this.resourceName = resourceName; - } - - public UdfType getType() { - return type; - } - - public void setType(UdfType type) { - this.type = type; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - @Override - public String toString() { - return "UdfFunc{" + - "id=" + id + - ", userId=" + userId + - ", funcName='" + funcName + '\'' + - ", className='" + className + '\'' + - ", argTypes='" + argTypes + '\'' + - ", database='" + database + '\'' + - ", desc='" + desc + '\'' + - ", resourceId=" + resourceId + - ", resourceName='" + resourceName + '\'' + - ", type=" + type + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == 
null || getClass() != o.getClass()) { - return false; - } - - UdfFunc udfFunc = (UdfFunc) o; - - if (id != udfFunc.id) { - return false; - } - return !(funcName != null ? !funcName.equals(udfFunc.funcName) : udfFunc.funcName != null); - - } - - @Override - public int hashCode() { - int result = id; - result = 31 * result + (funcName != null ? funcName.hashCode() : 0); - return result; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/User.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/User.java deleted file mode 100644 index e763561e37..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/User.java +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - - -import cn.escheduler.common.enums.UserType; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; -import lombok.Data; - -import java.util.Date; - -/** - * user - */ -@Data -@TableName("t_escheduler_user") -@ApiModel(description = "UserModelDesc") -public class User { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * user name - */ - @ApiModelProperty(name = "userName", notes = "USER_NAME",dataType = "String",required = true) - private String userName; - - /** - * user password - */ - @ApiModelProperty(name = "userPassword", notes = "USER_PASSWORD",dataType = "String",required = true) - private String userPassword; - - /** - * mail - */ - private String email; - - /** - * phone - */ - private String phone; - - /** - * user type - */ - private UserType userType; - - /** - * tenant id - */ - private int tenantId; - - /** - * tenant code - */ - @TableField(exist = false) - private String tenantCode; - - /** - * tenant name - */ - @TableField(exist = false) - private String tenantName; - - /** - * queue name - */ - @TableField(exist = false) - private String queueName; - - /** - * alert group - */ - @TableField(exist = false) - private String alertGroup; - - /** - * user specified queue - */ - private String queue; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - public String getUserPassword() { - return userPassword; - } - - public void 
setUserPassword(String userPassword) { - this.userPassword = userPassword; - } - - public String getEmail() { - return email; - } - - public void setEmail(String email) { - this.email = email; - } - - public UserType getUserType() { - return userType; - } - - public void setUserType(UserType userType) { - this.userType = userType; - } - - public int getTenantId() { - return tenantId; - } - - public void setTenantId(int tenantId) { - this.tenantId = tenantId; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - - public String getPhone() { - return phone; - } - - public void setPhone(String phone) { - this.phone = phone; - } - - public String getQueueName() { - return queueName; - } - - public void setQueueName(String queueName) { - this.queueName = queueName; - } - - public String getAlertGroup() { - return alertGroup; - } - - public void setAlertGroup(String alertGroup) { - this.alertGroup = alertGroup; - } - - public String getTenantName() { - return tenantName; - } - - public void setTenantName(String tenantName) { - this.tenantName = tenantName; - } - - public String getTenantCode() { - return tenantCode; - } - - public void setTenantCode(String tenantCode) { - this.tenantCode = tenantCode; - } - - public String getQueue() { - return queue; - } - - public void setQueue(String queue) { - this.queue = queue; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - User user = (User) o; - - if (id != user.id) { - return false; - } - return userName.equals(user.userName); - - } - - @Override - public int hashCode() { - int result = id; - result = 31 * result + userName.hashCode(); - return result; - } - - @Override - 
public String toString() { - return "User{" + - "id=" + id + - ", userName='" + userName + '\'' + - ", userPassword='" + userPassword + '\'' + - ", email='" + email + '\'' + - ", phone='" + phone + '\'' + - ", userType=" + userType + - ", tenantId=" + tenantId + - ", tenantCode='" + tenantCode + '\'' + - ", tenantName='" + tenantName + '\'' + - ", queueName='" + queueName + '\'' + - ", alertGroup='" + alertGroup + '\'' + - ", queue='" + queue + '\'' + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/UserAlertGroup.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/UserAlertGroup.java deleted file mode 100644 index 293aa370e8..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/UserAlertGroup.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * user alert group - */ -@Data -@TableName("t_escheduler_relation_user_alertgroup") -public class UserAlertGroup { - - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * id - */ - @TableField("alertgroup_id") - private int alertgroupId; - - /** - * alert group name - */ - @TableField(exist = false) - private String alertgroupName; - - /** - * user id - */ - private int userId; - - /** - * user name - */ - private String userName; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getAlertgroupId() { - return alertgroupId; - } - - public void setAlertgroupId(int alertgroupId) { - this.alertgroupId = alertgroupId; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - public String getAlertgroupName() { - return alertgroupName; - } - - public void setAlertgroupName(String alertgroupName) { - this.alertgroupName = alertgroupName; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - @Override - public String toString() { - return "UserAlertGroup{" + - "id=" + id + - ", alertgroupId=" + alertgroupId + - ", 
alertgroupName='" + alertgroupName + '\'' + - ", userId=" + userId + - ", userName='" + userName + '\'' + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/WorkerGroup.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/WorkerGroup.java deleted file mode 100644 index 6f4c689cf7..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/WorkerGroup.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import lombok.Data; - -import java.util.Date; - -/** - * worker group for task running - */ -@Data -@TableName("t_escheduler_worker_group") -public class WorkerGroup { - - @TableId(value="id", type=IdType.AUTO) - private int id; - - private String name; - - private String ipList; - - private Date createTime; - - private Date updateTime; - - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getIpList() { - return ipList; - } - - public void setIpList(String ipList) { - this.ipList = ipList; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - @Override - public String toString() { - return "Worker group model{" + - "id= " + id + - ",name= " + name + - ",ipList= " + ipList + - ",createTime= " + createTime + - ",updateTime= " + updateTime + - - "}"; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/WorkerServer.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/WorkerServer.java deleted file mode 100644 index a15e5da033..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/WorkerServer.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.entity; - -import java.util.Date; - -public class WorkerServer { - - /** - * id - */ - private int id; - - /** - * host - */ - private String host; - - /** - * port - */ - private int port; - - - /** - * zookeeper directory - */ - private String zkDirectory; - - /** - * resource info - */ - private String resInfo; - - /** - * create time - */ - private Date createTime; - - /** - * last heart beat time - */ - private Date lastHeartbeatTime; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public int getPort() { - return port; - } - - public void setPort(int port) { - this.port = port; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public String getZkDirectory() { - return zkDirectory; - } - - public void setZkDirectory(String zkDirectory) { - this.zkDirectory = zkDirectory; - } - - public Date getLastHeartbeatTime() { - return lastHeartbeatTime; - } - - public void setLastHeartbeatTime(Date lastHeartbeatTime) { - this.lastHeartbeatTime = lastHeartbeatTime; - } - - public String getResInfo() { - return resInfo; - } - - public void setResInfo(String resInfo) { - this.resInfo = resInfo; - } - - 
@Override - public String toString() { - return "WorkerServer{" + - "id=" + id + - ", host='" + host + '\'' + - ", port=" + port + - ", zkDirectory='" + zkDirectory + '\'' + - ", resInfo='" + resInfo + '\'' + - ", createTime=" + createTime + - ", lastHeartbeatTime=" + lastHeartbeatTime + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ZookeeperRecord.java b/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ZookeeperRecord.java deleted file mode 100644 index cb0a9104c6..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/entity/ZookeeperRecord.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.entity; - -import java.util.Date; - -/** - * monitor record for zookeeper - */ -public class ZookeeperRecord { - - /** - * hostname - */ - private String hostname; - - /** - * connections - */ - private int connections; - - /** - * max connections - */ - private int watches; - - /** - * sent - */ - private long sent; - - /** - * received - */ - private long received; - - /** - * mode: leader or follower - */ - private String mode; - - /** - * min Latency - */ - private int minLatency; - - /** - * avg Latency - */ - private int avgLatency; - - /** - * max Latency - */ - private int maxLatency; - - /** - * node count - */ - private int nodeCount; - - /** - * date - */ - private Date date; - - - /** - * is normal or not, 1:normal - */ - private int state; - - - public ZookeeperRecord(String hostname,int connections, int watches, long sent, long received, String mode, int minLatency, int avgLatency, int maxLatency, int nodeCount, int state,Date date) { - this.hostname = hostname; - this.connections = connections; - this.watches = watches; - this.sent = sent; - this.received = received; - this.mode = mode; - this.minLatency = minLatency; - this.avgLatency = avgLatency; - this.maxLatency = maxLatency; - this.nodeCount = nodeCount; - this.state = state; - this.date = date; - } - - - public String getHostname() { - return hostname; - } - - public void setHostname(String hostname) { - this.hostname = hostname; - } - - public int getConnections() { - return connections; - } - - public void setConnections(int connections) { - this.connections = connections; - } - - public int getWatches() { - return watches; - } - - public void setWatches(int watches) { - this.watches = watches; - } - - public long getSent() { - return sent; - } - - public void setSent(long sent) { - this.sent = sent; - } - - public long getReceived() { - return received; - } - - public void setReceived(long received) { - this.received = received; - } - - public String 
getMode() { - return mode; - } - - public void setMode(String mode) { - this.mode = mode; - } - - public int getMinLatency() { - return minLatency; - } - - public void setMinLatency(int minLatency) { - this.minLatency = minLatency; - } - - public int getAvgLatency() { - return avgLatency; - } - - public void setAvgLatency(int avgLatency) { - this.avgLatency = avgLatency; - } - - public int getMaxLatency() { - return maxLatency; - } - - public void setMaxLatency(int maxLatency) { - this.maxLatency = maxLatency; - } - - public int getNodeCount() { - return nodeCount; - } - - public void setNodeCount(int nodeCount) { - this.nodeCount = nodeCount; - } - - public int getState() { - return state; - } - - public void setState(int state) { - this.state = state; - } - - public Date getDate() { - return date; - } - - public void setDate(Date date) { - this.date = date; - } - - @Override - public String toString() { - return "ZookeeperRecord{" + - "hostname='" + hostname + '\'' + - ", connections=" + connections + - ", watches=" + watches + - ", sent=" + sent + - ", received=" + received + - ", mode='" + mode + '\'' + - ", minLatency=" + minLatency + - ", avgLatency=" + avgLatency + - ", maxLatency=" + maxLatency + - ", nodeCount=" + nodeCount + - ", date=" + date + - ", state=" + state + - '}'; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AccessTokenMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AccessTokenMapper.java deleted file mode 100644 index 00d18d66ff..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AccessTokenMapper.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.AccessToken; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.ibatis.annotations.Param; - -public interface AccessTokenMapper extends BaseMapper { - - /** - * - * @param page - * @param userName - * @param userId - * @return - */ - IPage selectAccessTokenPage(Page page, - @Param("userName") String userName, - @Param("userId") int userId - ); -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AlertGroupMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AlertGroupMapper.java deleted file mode 100644 index 975b770d77..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AlertGroupMapper.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.common.enums.AlertType; -import cn.escheduler.dao.entity.AlertGroup; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - - -public interface AlertGroupMapper extends BaseMapper { - - - IPage queryAlertGroupPage(Page page, - @Param("groupName") String groupName); - - - List queryByGroupName(@Param("groupName") String groupName); - - - List queryByUserId(@Param("userId") int userId); - - - List queryByAlertType(@Param("alertType") AlertType alertType); - - List queryAllGroupList(); -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AlertMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AlertMapper.java deleted file mode 100644 index c9ef374bbb..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/AlertMapper.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.common.enums.AlertStatus; -import cn.escheduler.dao.entity.Alert; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface AlertMapper extends BaseMapper { - - - List listAlertByStatus(@Param("alertStatus") AlertStatus alertStatus); - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/CommandMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/CommandMapper.java deleted file mode 100644 index ebf45bdd04..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/CommandMapper.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.Command; -import cn.escheduler.dao.entity.CommandCount; -import com.baomidou.mybatisplus.core.conditions.Wrapper; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.toolkit.Constants; -import org.apache.ibatis.annotations.Param; -import org.apache.ibatis.annotations.Select; - -import java.util.Date; -import java.util.List; - -public interface CommandMapper extends BaseMapper { - - - - @Select("select * from t_escheduler_command ${ew.customSqlSegment}") - List getAll(@Param(Constants.WRAPPER) Wrapper wrapper); - - Command getOneToRun(); - - List countCommandState( - @Param("userId") int userId, - @Param("startTime") Date startTime, - @Param("endTime") Date endTime, - @Param("projectIdArray") Integer[] projectIdArray); - - - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceMapper.java deleted file mode 100644 index fc88c1edc8..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceMapper.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.DataSource; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface DataSourceMapper extends BaseMapper { - - List queryDataSourceByType(@Param("userId") int userId, @Param("type") Integer type); - - IPage selectPaging(IPage page, - @Param("userId") int userId, - @Param("name") String name); - - List queryDataSourceByName(@Param("name") String name); - - - List queryAuthedDatasource(@Param("userId") int userId); - - List queryDatasourceExceptUserId(@Param("userId") int userId); - - List listAllDataSourceByType(@Param("type") Integer type); - - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceUserMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceUserMapper.java deleted file mode 100644 index 61a57936ce..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceUserMapper.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.DatasourceUser; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import org.apache.ibatis.annotations.Param; - -public interface DataSourceUserMapper extends BaseMapper { - - - int deleteByUserId(@Param("userId") int userId); - - int deleteByDatasourceId(@Param("datasourceId") int datasourceId); - - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ErrorCommandMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ErrorCommandMapper.java deleted file mode 100644 index c3675726b1..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ErrorCommandMapper.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.CommandCount; -import cn.escheduler.dao.entity.ErrorCommand; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import org.apache.ibatis.annotations.Param; - -import java.util.Date; -import java.util.List; - -public interface ErrorCommandMapper extends BaseMapper { - - List countCommandState( - @Param("startTime") Date startTime, - @Param("endTime") Date endTime, - @Param("projectIdArray") Integer[] projectIdArray); -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapper.java deleted file mode 100644 index e8029f8b34..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapper.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.common.enums.Flag; -import cn.escheduler.common.enums.ReleaseState; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.DefinitionGroupByUser; -import cn.escheduler.dao.entity.ProcessDefinition; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface ProcessDefinitionMapper extends BaseMapper { - - - ProcessDefinition queryByDefineName(@Param("projectId") int projectId, - @Param("processDefinitionName") String name); - - IPage queryDefineListPaging(IPage page, - @Param("searchVal") String searchVal, - @Param("userId") int userId, - @Param("projectId") int projectId); - - List queryAllDefinitionList(@Param("projectId") int projectId); - - List queryDefinitionListByIdList(@Param("ids") Integer[] ids); - - List countDefinitionGroupByUser( - @Param("userId") Integer userId, - @Param("projectIds") Integer[] projectIds); -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapMapper.java deleted file mode 100644 index cfa2c7619c..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapMapper.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.ProcessInstanceMap; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface ProcessInstanceMapMapper extends BaseMapper { - - - - ProcessInstanceMap queryByParentId(@Param("parentProcessId") int parentProcessId, @Param("parentTaskId") int parentTaskId); - - - ProcessInstanceMap queryBySubProcessId(@Param("subProcessId") Integer subProcessId); - - int deleteByParentProcessId(@Param("parentProcessId") int parentProcessId); - - List querySubIdListByParentId(@Param("parentInstanceId") int parentInstanceId); - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java deleted file mode 100644 index 075412287a..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.common.enums.*; -import cn.escheduler.dao.entity.ExecuteStatusCount; -import cn.escheduler.dao.entity.ProcessInstance; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.ibatis.annotations.Param; - -import java.util.Date; -import java.util.List; - -public interface ProcessInstanceMapper extends BaseMapper { - - ProcessInstance queryDetailById(@Param("processId") int processId); - - List queryByHostAndStatus(@Param("host") String host, - @Param("states") int[] stateArray); - - IPage queryProcessInstanceListPaging(Page page, - @Param("projectId") int projectId, - @Param("processDefinitionId") Integer processDefinitionId, - @Param("searchVal") String searchVal, - @Param("states") int[] statusArray, - @Param("host") String host, - @Param("startTime") Date startTime, - @Param("endTime") Date endTime - ); - - int setFailoverByHostAndStateArray(@Param("host") String host, - @Param("states") int[] stateArray); - - int updateProcessInstanceByState(@Param("originState") ExecutionStatus originState, - @Param("destState") ExecutionStatus destState); - - List countInstanceStateByUser( - @Param("startTime") Date startTime, - @Param("endTime") Date endTime, - @Param("projectIds") Integer[] projectIds); - - List queryByProcessDefineId( - @Param("processDefinitionId") int processDefinitionId, - @Param("size") int size); - - ProcessInstance 
queryLastSchedulerProcess(@Param("processDefinitionId") int definitionId, - @Param("startTime") String startTime, - @Param("endTime") String endTime); - - ProcessInstance queryLastRunningProcess(@Param("processDefinitionId") int definitionId, - @Param("startTime") String startTime, - @Param("endTime") String endTime, - @Param("states") int[] stateArray); - - ProcessInstance queryLastManualProcess(@Param("processDefinitionId") int definitionId, - @Param("startTime") Date startTime, - @Param("endTime") Date endTime); -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapper.java deleted file mode 100644 index bd069f789e..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapper.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.Project; -import com.amazonaws.services.dynamodbv2.model.transform.ProjectionJsonUnmarshaller; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface ProjectMapper extends BaseMapper { - - Project queryDetailById(@Param("projectId") int projectId); - - Project queryByName(@Param("projectName") String projectName); - - IPage queryProjectListPaging(IPage page, - @Param("userId") int userId, - @Param("searchName") String searchName); - - IPage queryAllProjectListPaging(IPage page, - @Param("searchName") String searchName); - - List queryProjectCreatedByUser(@Param("userId") int userId); - - List queryAuthedProjectListByUserId(@Param("userId") int userId); - - List queryProjectExceptUserId(@Param("userId") int userId); - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectUserMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectUserMapper.java deleted file mode 100644 index a8bf4ac3c5..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectUserMapper.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.ProjectUser; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import org.apache.ibatis.annotations.Param; - -public interface ProjectUserMapper extends BaseMapper { - - int deleteProjectRelation(@Param("projectId") int projectId, - @Param("userId") int userId); - - ProjectUser queryProjectRelation(@Param("projectId") int projectId, - @Param("userId") int userId); -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/QueueMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/QueueMapper.java deleted file mode 100644 index 02e93a5ff5..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/QueueMapper.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.Queue; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -/** - * - */ -public interface QueueMapper extends BaseMapper { - - IPage queryQueuePaging(IPage page, - @Param("searchVal") String searchVal); - - List queryAllQueueList(@Param("queue") String queue, - @Param("queueName") String queueName); - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ResourceMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ResourceMapper.java deleted file mode 100644 index bb2f25138c..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ResourceMapper.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.Resource; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface ResourceMapper extends BaseMapper { - - /** - * - * @param alias query all if null - * @param userId query all if -1 - * @param type query all type if -1 - * @return - */ - List queryResourceList(@Param("alias") String alias, - @Param("userId") int userId, - @Param("type") int type); - - - /** - * - * @param page - * @param userId query all if -1, then query the authed resources - * @param type - * @param searchVal - * @return - */ - IPage queryResourcePaging(IPage page, - @Param("userId") int userId, - @Param("type") int type, - @Param("searchVal") String searchVal); - - /** - * - * @param userId - * @param type - * @return - */ - List queryResourceListAuthored(@Param("userId") int userId, @Param("type") int type); - - /** - * - * @param userId - * @return - */ - List queryAuthorizedResourceList(@Param("userId") int userId); - - List queryResourceExceptUserId(@Param("userId") int userId); - - - String queryTenantCodeByResourceName(@Param("resName") String resName); -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ResourceUserMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ResourceUserMapper.java deleted file mode 100644 index bf74bda905..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ResourceUserMapper.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.ResourcesUser; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import org.apache.ibatis.annotations.Param; - -public interface ResourceUserMapper extends BaseMapper { - - int deleteResourceUser(@Param("userId") int userId, - @Param("resourceId") int resourceId); - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapper.java deleted file mode 100644 index b3c46bc2b1..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapper.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.Schedule; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface ScheduleMapper extends BaseMapper { - - - IPage queryByProcessDefineIdPaging(IPage page, - @Param("processDefinitionId") int processDefinitionId, - @Param("searchVal") String searchVal); - List querySchedulerListByProjectName(@Param("projectName") String projectName); - - - List selectAllByProcessDefineArray(@Param("processDefineIds") int[] processDefineIds); - - List queryByProcessDefinitionId(@Param("processDefinitionId") int processDefinitionId); - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/SessionMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/SessionMapper.java deleted file mode 100644 index 1d17cd6527..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/SessionMapper.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.Session; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface SessionMapper extends BaseMapper { - - List queryByUserId(@Param("userId") int userId); - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TaskInstanceMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TaskInstanceMapper.java deleted file mode 100644 index 9e81d8d9c6..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TaskInstanceMapper.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.common.enums.*; -import cn.escheduler.dao.entity.ExecuteStatusCount; -import cn.escheduler.dao.entity.TaskInstance; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import org.apache.ibatis.annotations.Param; - -import java.util.Date; -import java.util.List; - -public interface TaskInstanceMapper extends BaseMapper { - - - List queryTaskByProcessIdAndState(@Param("processInstanceId") Integer processInstanceId, - @Param("state") Integer state); - - - TaskInstance queryById(@Param("taskInstanceId") int taskInstanceId); - - List findValidTaskListByProcessId(@Param("processInstanceId") Integer processInstanceId, - @Param("flag") Flag flag); - - List queryByHostAndStatus(@Param("host") String host, - @Param("states") String stateArray); - - int setFailoverByHostAndStateArray(@Param("host") String host, - @Param("states") String stateArray, - @Param("destStatus") ExecutionStatus destStatus); - - TaskInstance queryByInstanceIdAndName(@Param("processInstanceId") int processInstanceId, - @Param("name") String name); - - Integer countTask(@Param("userId") int userId, - @Param("userType") UserType userType, - @Param("projectIds") int[] projectIds, - @Param("taskIds") int[] taskIds); - - List countTaskInstanceStateByUser(@Param("userId") int userId, - @Param("userType") UserType userType, - @Param("startTime") Date startTime, - @Param("endTime") Date endTime, - @Param("projectIds") String projectIds); - - IPage queryTaskInstanceListPaging(IPage page, - @Param("projectId") int projectId, - @Param("processInstanceId") Integer processInstanceId, - @Param("searchVal") String searchVal, - @Param("taskName") String taskName, - @Param("states") String statusArray, - @Param("host") String host, - @Param("startTime") Date startTime, - @Param("endTime") Date endTime - ); -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TenantMapper.java 
b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TenantMapper.java deleted file mode 100644 index b590edb4d9..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TenantMapper.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.Tenant; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface TenantMapper extends BaseMapper { - - Tenant queryById(@Param("tenantId") int tenantId); - - List queryByTenantCode(@Param("tenantCode") String tenantCode); - - IPage queryTenantPaging(IPage page, - @Param("searchVal") String searchVal); -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UDFUserMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UDFUserMapper.java deleted file mode 100644 index 919c3e0a32..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UDFUserMapper.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.UDFUser; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import org.apache.ibatis.annotations.Param; - -public interface UDFUserMapper extends BaseMapper { - - int deleteByUserId(@Param("userId") int userId); - - int deleteByUdfFuncId(@Param("udfFuncId") int udfFuncId); - -} - diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UdfFuncMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UdfFuncMapper.java deleted file mode 100644 index 02614a62b3..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UdfFuncMapper.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.UdfFunc; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface UdfFuncMapper extends BaseMapper { - - - List queryUdfByIdStr(@Param("ids") String ids, - @Param("funcNames") String funcNames); - - IPage queryUdfFuncPaging(IPage page, - @Param("userId") int userId, - @Param("searchVal") String searchVal); - - List getUdfFuncByType(@Param("userId") int userId, - @Param("type") Integer type); - - List queryUdfFuncExceptUserId(@Param("userId") int userId); - - List queryAuthedUdfFunc(@Param("userId") int userId); - - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserAlertGroupMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserAlertGroupMapper.java deleted file mode 100644 index 6c8215e24d..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserAlertGroupMapper.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.User; -import cn.escheduler.dao.entity.UserAlertGroup; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface UserAlertGroupMapper extends BaseMapper { - - List queryForUser(@Param("alertgroupId") int alertgroupId); - - int deleteByAlertgroupId(@Param("alertgroupId") int alertgroupId); - - List listUserByAlertgroupId(@Param("alertgroupId") int alertgroupId); - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapper.java deleted file mode 100644 index 240a3221ab..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapper.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.User; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface UserMapper extends BaseMapper { - - List queryAllGeneralUser(); - - User queryByUserNameAccurately(@Param("userName") String userName); - - User queryUserByNamePassword(@Param("userName") String userName, @Param("password") String password); - - - IPage queryUserPaging(Page page, - @Param("userName") String userName); - - User queryDetailsById(@Param("userId") int userId); - - List queryUserListByAlertGroupId(@Param("alertgroupId") int alertgroupId); - - - User queryTenantCodeByUserId(@Param("userId") int userId); - - User queryUserByToken(@Param("token") String token); - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerGroupMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerGroupMapper.java deleted file mode 100644 index 07929b95e9..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/WorkerGroupMapper.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.WorkerGroup; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -public interface WorkerGroupMapper extends BaseMapper { - - List queryAllWorkerGroup(); - - List queryWorkerGroupByName(@Param("name") String name); - - IPage queryListPaging(IPage page, - @Param("searchVal") String searchVal); - -} - diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/DolphinSchedulerManager.java b/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/DolphinSchedulerManager.java deleted file mode 100644 index f15312a3f3..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/DolphinSchedulerManager.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.upgrade; - -import cn.escheduler.common.enums.DbType; -import cn.escheduler.common.utils.SchemaUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - -/** - * upgrade manager - */ -public class DolphinSchedulerManager { - private static final Logger logger = LoggerFactory.getLogger(DolphinSchedulerManager.class); - UpgradeDao upgradeDao; - - private void initUpgradeDao() { - DbType dbType = UpgradeDao.getDbType(); - if (dbType != null) { - switch (dbType) { - case MYSQL: - upgradeDao = MysqlUpgradeDao.getInstance(); - break; - case POSTGRESQL: - upgradeDao = PostgresqlUpgradeDao.getInstance(); - break; - default: - logger.error("not support sql type: {},can't upgrade", dbType); - throw new IllegalArgumentException("not support sql type,can't upgrade"); - } - } - } - - public DolphinSchedulerManager() { - initUpgradeDao(); - } - - public void initDolphinScheduler() { - // Determines whether the escheduler table structure has been init - if(upgradeDao.isExistsTable("t_escheduler_version") || upgradeDao.isExistsTable("t_escheduler_queue")) { - logger.info("The database has been initialized. 
Skip the initialization step"); - return; - } - this.initDolphinSchedulerSchema(); - } - - public void initDolphinSchedulerSchema() { - - logger.info("Start initializing the DolphinScheduler manager table structure"); - upgradeDao.initSchema(); - } - - - /** - * upgrade DolphinScheduler - */ - public void upgradeDolphinScheduler() throws Exception{ - - // Gets a list of all upgrades - List schemaList = SchemaUtils.getAllSchemaList(); - if(schemaList == null || schemaList.size() == 0) { - logger.info("There is no schema to upgrade!"); - }else { - - String version = ""; - // Gets the version of the current system - if (upgradeDao.isExistsTable("t_escheduler_version")) { - version = upgradeDao.getCurrentVersion("t_escheduler_version"); - }else if(upgradeDao.isExistsTable("t_dolphinscheduler_version")){ - version = upgradeDao.getCurrentVersion("t_dolphinscheduler_version"); - }else if(upgradeDao.isExistsColumn("t_escheduler_queue","create_time")){ - version = "1.0.1"; - }else if(upgradeDao.isExistsTable("t_escheduler_queue")){ - version = "1.0.0"; - }else{ - logger.error("Unable to determine current software version, so cannot upgrade"); - throw new RuntimeException("Unable to determine current software version, so cannot upgrade"); - } - // The target version of the upgrade - String schemaVersion = ""; - for(String schemaDir : schemaList) { - schemaVersion = schemaDir.split("_")[0]; - if(SchemaUtils.isAGreatVersion(schemaVersion , version)) { - - logger.info("upgrade DolphinScheduler metadata version from " + version + " to " + schemaVersion); - - logger.info("Begin upgrading DolphinScheduler's table structure"); - upgradeDao.upgradeDolphinScheduler(schemaDir); - version = schemaVersion; - } - - } - } - - // Assign the value of the version field in the version table to the version of the product - upgradeDao.updateVersion(SchemaUtils.getSoftVersion()); - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/MysqlUpgradeDao.java 
b/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/MysqlUpgradeDao.java deleted file mode 100644 index d6a8974d28..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/MysqlUpgradeDao.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.upgrade; - -import cn.escheduler.common.utils.ConnectionUtils; -import cn.escheduler.dao.datasource.ConnectionFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; - -public class MysqlUpgradeDao extends UpgradeDao { - - public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); - - @Override - protected void init() { - - } - - private static class MysqlUpgradeDaoHolder { - private static final MysqlUpgradeDao INSTANCE = new MysqlUpgradeDao(); - } - - private MysqlUpgradeDao() { - } - - public static final MysqlUpgradeDao getInstance() { - return MysqlUpgradeDaoHolder.INSTANCE; - } - - - /** - * Determines whether a table exists - * @param tableName - * @return - */ - public boolean isExistsTable(String tableName) { - ResultSet rs = null; - Connection conn = null; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - rs = conn.getMetaData().getTables(null, null, tableName, null); - if (rs.next()) { - return true; - } else { - return false; - } - - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } finally { - ConnectionUtils.releaseResource(rs, null, conn); - } - - } - - /** - * Determines whether a field exists in the specified table - * @param tableName - * @param columnName - * @return - */ - public boolean isExistsColumn(String tableName,String columnName) { - Connection conn = null; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - ResultSet rs = conn.getMetaData().getColumns(null,null,tableName,columnName); - if (rs.next()) { - return true; - } else { - return false; - } - - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } finally { - ConnectionUtils.releaseResource(null, null, conn); - } - - } - -} diff --git 
a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/PostgresqlUpgradeDao.java b/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/PostgresqlUpgradeDao.java deleted file mode 100644 index 87ac1b4bde..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/PostgresqlUpgradeDao.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.upgrade; - -import cn.escheduler.common.utils.ConnectionUtils; -import cn.escheduler.dao.datasource.ConnectionFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; - -public class PostgresqlUpgradeDao extends UpgradeDao { - - public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); - private static final String schema = getSchema(); - - @Override - protected void init() { - - } - - private static class PostgresqlUpgradeDaoHolder { - private static final PostgresqlUpgradeDao INSTANCE = new PostgresqlUpgradeDao(); - } - - private PostgresqlUpgradeDao() { - } - - public static final PostgresqlUpgradeDao getInstance() { - return PostgresqlUpgradeDaoHolder.INSTANCE; - } - - - @Override - public void initSchema(String initSqlPath) { - super.initSchema(initSqlPath); - } - - public static String getSchema(){ - Connection conn = null; - PreparedStatement pstmt = null; - ResultSet resultSet = null; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - pstmt = conn.prepareStatement("select current_schema()"); - resultSet = pstmt.executeQuery(); - while (resultSet.next()){ - if(resultSet.isFirst()){ - return resultSet.getString(1); - } - } - - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } finally { - ConnectionUtils.releaseResource(resultSet, pstmt, conn); - } - return ""; - } - - /** - * Determines whether a table exists - * @param tableName - * @return - */ - public boolean isExistsTable(String tableName) { - Connection conn = null; - ResultSet rs = null; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - - rs = conn.getMetaData().getTables(null, schema, tableName, null); - if (rs.next()) { - return true; - } else { - return false; - } - - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new 
RuntimeException(e.getMessage(),e); - } finally { - ConnectionUtils.releaseResource(rs, null, conn); - } - - } - - /** - * Determines whether a field exists in the specified table - * @param tableName - * @param columnName - * @return - */ - public boolean isExistsColumn(String tableName,String columnName) { - Connection conn = null; - ResultSet rs = null; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - rs = conn.getMetaData().getColumns(null,schema,tableName,columnName); - if (rs.next()) { - return true; - } else { - return false; - } - - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } finally { - ConnectionUtils.releaseResource(rs, null, conn); - - } - - } - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/UpgradeDao.java b/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/UpgradeDao.java deleted file mode 100644 index 820d8a3c99..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/UpgradeDao.java +++ /dev/null @@ -1,359 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.upgrade; - -import cn.escheduler.common.enums.DbType; -import cn.escheduler.common.utils.ConnectionUtils; -import cn.escheduler.common.utils.SchemaUtils; -import cn.escheduler.common.utils.ScriptRunner; -import cn.escheduler.dao.AbstractBaseDao; -import cn.escheduler.dao.datasource.ConnectionFactory; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.text.MessageFormat; - -public abstract class UpgradeDao extends AbstractBaseDao { - - public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); - private static final String T_VERSION_NAME = "t_escheduler_version"; - private static final String T_NEW_VERSION_NAME = "t_dolphinscheduler_version"; - private static final String rootDir = System.getProperty("user.dir"); - private static final DbType dbType = getCurrentDbType(); - - @Override - protected void init() { - - } - - /** - * get db type - * @return - */ - public static DbType getDbType(){ - return dbType; - } - - /** - * get db type - * @return - */ - private static DbType getCurrentDbType(){ - Connection conn = null; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - String name = conn.getMetaData().getDatabaseProductName().toUpperCase(); - return DbType.valueOf(name); - } catch (Exception e) { - logger.error(e.getMessage(),e); - return null; - }finally { - ConnectionUtils.releaseResource(null, null, conn); - } - } - - public void initSchema(){ - DbType dbType = getDbType(); - String initSqlPath = ""; - if (dbType != null) { - switch (dbType) { - case MYSQL: - initSqlPath = "/sql/create/release-1.0.0_schema/mysql/"; - initSchema(initSqlPath); - break; - case POSTGRESQL: - initSqlPath = "/sql/create/release-1.2.0_schema/postgresql/"; - initSchema(initSqlPath); - break; - default: - 
logger.error("not support sql type: {},can't upgrade", dbType); - throw new IllegalArgumentException("not support sql type,can't upgrade"); - } - } - } - - - public void initSchema(String initSqlPath) { - - // Execute the escheduler DDL, it cannot be rolled back - runInitDDL(initSqlPath); - - // Execute the escheduler DML, it can be rolled back - runInitDML(initSqlPath); - - } - - private void runInitDML(String initSqlPath) { - Connection conn = null; - if (StringUtils.isEmpty(rootDir)) { - throw new RuntimeException("Environment variable user.dir not found"); - } - //String mysqlSQLFilePath = rootDir + "/sql/create/release-1.0.0_schema/mysql/escheduler_dml.sql"; - String mysqlSQLFilePath = rootDir + initSqlPath + "dolphinscheduler_dml.sql"; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - conn.setAutoCommit(false); - // 执行escheduler_dml.sql脚本,导入escheduler相关的数据 - // Execute the ark_manager_dml.sql script to import the data related to escheduler - - ScriptRunner initScriptRunner = new ScriptRunner(conn, false, true); - Reader initSqlReader = new FileReader(new File(mysqlSQLFilePath)); - initScriptRunner.runScript(initSqlReader); - - conn.commit(); - } catch (IOException e) { - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error(e1.getMessage(),e1); - } - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } catch (Exception e) { - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error(e1.getMessage(),e1); - } - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } finally { - ConnectionUtils.releaseResource(null, null, conn); - - } - - } - - private void runInitDDL(String initSqlPath) { - Connection conn = null; - if (StringUtils.isEmpty(rootDir)) { - throw new RuntimeException("Environment variable user.dir not found"); - } - //String mysqlSQLFilePath = rootDir + "/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql"; - String 
mysqlSQLFilePath = rootDir + initSqlPath + "dolphinscheduler_ddl.sql"; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - // Execute the escheduler_ddl.sql script to create the table structure of escheduler - ScriptRunner initScriptRunner = new ScriptRunner(conn, true, true); - Reader initSqlReader = new FileReader(new File(mysqlSQLFilePath)); - initScriptRunner.runScript(initSqlReader); - - } catch (IOException e) { - - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } catch (Exception e) { - - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } finally { - ConnectionUtils.releaseResource(null, null, conn); - - } - - } - - /** - * Determines whether a table exists - * @param tableName - * @return - */ - public abstract boolean isExistsTable(String tableName); - - /** - * Determines whether a field exists in the specified table - * @param tableName - * @param columnName - * @return - */ - public abstract boolean isExistsColumn(String tableName,String columnName); - - - public String getCurrentVersion(String versionName) { - String sql = String.format("select version from %s",versionName); - Connection conn = null; - ResultSet rs = null; - PreparedStatement pstmt = null; - String version = null; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - pstmt = conn.prepareStatement(sql); - rs = pstmt.executeQuery(); - - if (rs.next()) { - version = rs.getString(1); - } - - return version; - - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException("sql: " + sql, e); - } finally { - ConnectionUtils.releaseResource(rs, pstmt, conn); - } - } - - - public void upgradeDolphinScheduler(String schemaDir) { - - upgradeDolphinSchedulerDDL(schemaDir); - - upgradeDolphinSchedulerDML(schemaDir); - - } - - private void upgradeDolphinSchedulerDML(String schemaDir) { - String schemaVersion = schemaDir.split("_")[0]; - if (StringUtils.isEmpty(rootDir)) 
{ - throw new RuntimeException("Environment variable user.dir not found"); - } - String mysqlSQLFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_dml.sql",rootDir,schemaDir,getDbType().name().toLowerCase()); - logger.info("mysqlSQLFilePath"+mysqlSQLFilePath); - Connection conn = null; - PreparedStatement pstmt = null; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - conn.setAutoCommit(false); - // Execute the upgraded escheduler dml - ScriptRunner scriptRunner = new ScriptRunner(conn, false, true); - Reader sqlReader = new FileReader(new File(mysqlSQLFilePath)); - scriptRunner.runScript(sqlReader); - if (isExistsTable(T_VERSION_NAME)) { - // Change version in the version table to the new version - String upgradeSQL = String.format("update %s set version = ?",T_VERSION_NAME); - pstmt = conn.prepareStatement(upgradeSQL); - pstmt.setString(1, schemaVersion); - pstmt.executeUpdate(); - }else if (isExistsTable(T_NEW_VERSION_NAME)) { - // Change version in the version table to the new version - String upgradeSQL = String.format("update %s set version = ?",T_NEW_VERSION_NAME); - pstmt = conn.prepareStatement(upgradeSQL); - pstmt.setString(1, schemaVersion); - pstmt.executeUpdate(); - } - conn.commit(); - } catch (FileNotFoundException e) { - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error(e1.getMessage(),e1); - } - logger.error(e.getMessage(),e); - throw new RuntimeException("sql file not found ", e); - } catch (IOException e) { - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error(e1.getMessage(),e1); - } - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } catch (SQLException e) { - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error(e1.getMessage(),e1); - } - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } catch (Exception e) { - try { - conn.rollback(); - } catch (SQLException e1) { - 
logger.error(e1.getMessage(),e1); - } - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } finally { - ConnectionUtils.releaseResource(null, pstmt, conn); - } - - } - - private void upgradeDolphinSchedulerDDL(String schemaDir) { - if (StringUtils.isEmpty(rootDir)) { - throw new RuntimeException("Environment variable user.dir not found"); - } - String mysqlSQLFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_ddl.sql",rootDir,schemaDir,getDbType().name().toLowerCase()); - Connection conn = null; - PreparedStatement pstmt = null; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - String dbName = conn.getCatalog(); - logger.info(dbName); - conn.setAutoCommit(true); - // Execute the escheduler ddl.sql for the upgrade - ScriptRunner scriptRunner = new ScriptRunner(conn, true, true); - Reader sqlReader = new FileReader(new File(mysqlSQLFilePath)); - scriptRunner.runScript(sqlReader); - - } catch (FileNotFoundException e) { - - logger.error(e.getMessage(),e); - throw new RuntimeException("sql file not found ", e); - } catch (IOException e) { - - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } catch (SQLException e) { - - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } catch (Exception e) { - - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); - } finally { - ConnectionUtils.releaseResource(null, pstmt, conn); - } - - } - - - - public void updateVersion(String version) { - // Change version in the version table to the new version - String versionName = T_VERSION_NAME; - if(!SchemaUtils.isAGreatVersion("1.2.0" , version)){ - versionName = "t_dolphinscheduler_version"; - } - String upgradeSQL = String.format("update %s set version = ?",versionName); - PreparedStatement pstmt = null; - Connection conn = null; - try { - conn = ConnectionFactory.getDataSource().getConnection(); - pstmt = 
conn.prepareStatement(upgradeSQL); - pstmt.setString(1, version); - pstmt.executeUpdate(); - - } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException("sql: " + upgradeSQL, e); - } finally { - ConnectionUtils.releaseResource(null, pstmt, conn); - } - - } - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/CreateDolphinScheduler.java b/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/CreateDolphinScheduler.java deleted file mode 100644 index 2c827dfea4..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/CreateDolphinScheduler.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.upgrade.shell; - -import cn.escheduler.dao.upgrade.DolphinSchedulerManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * init DolphinScheduler - * - */ -public class CreateDolphinScheduler { - - private static final Logger logger = LoggerFactory.getLogger(CreateDolphinScheduler.class); - - public static void main(String[] args) { - DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager(); - - try { - dolphinSchedulerManager.initDolphinScheduler(); - logger.info("init DolphinScheduler finished"); - dolphinSchedulerManager.upgradeDolphinScheduler(); - logger.info("upgrade DolphinScheduler finished"); - logger.info("create DolphinScheduler success"); - } catch (Exception e) { - logger.error("create DolphinScheduler failed",e); - } - - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/InitDolphinScheduler.java b/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/InitDolphinScheduler.java deleted file mode 100644 index 4c01f7413b..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/InitDolphinScheduler.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.upgrade.shell; - -import cn.escheduler.dao.upgrade.DolphinSchedulerManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * init DolphinScheduler - * - */ -public class InitDolphinScheduler { - - private static final Logger logger = LoggerFactory.getLogger(InitDolphinScheduler.class); - - public static void main(String[] args) { - Thread.currentThread().setName("manager-InitDolphinScheduler"); - DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager(); - dolphinSchedulerManager.initDolphinScheduler(); - logger.info("init DolphinScheduler finished"); - - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java b/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java deleted file mode 100644 index 56e706cd2b..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.upgrade.shell; - -import cn.escheduler.dao.upgrade.DolphinSchedulerManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * upgrade DolphinScheduler database - */ -public class UpgradeDolphinScheduler { - private static final Logger logger = LoggerFactory.getLogger(UpgradeDolphinScheduler.class); - - public static void main(String[] args) { - DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager(); - try { - dolphinSchedulerManager.upgradeDolphinScheduler(); - logger.info("upgrade DolphinScheduler success"); - } catch (Exception e) { - logger.error(e.getMessage(),e); - logger.info("Upgrade DolphinScheduler failed"); - throw new RuntimeException(e); - } - } - - - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/BeanContext.java b/escheduler-dao/src/main/java/cn/escheduler/dao/utils/BeanContext.java deleted file mode 100644 index b768c050fe..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/BeanContext.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.utils; - - -import org.springframework.beans.BeansException; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; -import org.springframework.stereotype.Component; - -/** - * bean context - */ -@Component - public class BeanContext implements ApplicationContextAware { - private static ApplicationContext applicationContext; - - public static ApplicationContext getApplicationContext(){ - return applicationContext; - } - - @SuppressWarnings("unchecked") - public static T getBean(String name) throws BeansException { - return (T)applicationContext.getBean(name); - } - - public static T getBean(Class clz) throws BeansException { - return applicationContext.getBean(clz); - } - - - - @Override - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { - BeanContext.applicationContext = applicationContext; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/DagHelper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/utils/DagHelper.java deleted file mode 100644 index 5f9c2fdf58..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/DagHelper.java +++ /dev/null @@ -1,330 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.utils; - - -import cn.escheduler.common.enums.TaskDependType; -import cn.escheduler.common.graph.DAG; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.model.TaskNodeRelation; -import cn.escheduler.common.process.ProcessDag; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.dao.entity.ProcessData; -import cn.escheduler.dao.entity.TaskInstance; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; - -/** - * dag tools - */ -public class DagHelper { - - - private static final Logger logger = LoggerFactory.getLogger(DagHelper.class); - - /** - * generate flow node relation list by task node list; - * Edges that are not in the task Node List will not be added to the result - * 根据task Node List生成node关系列表,不在task Node List中的边不会被添加到结果中 - * - * @param taskNodeList - * @return - */ - private static List generateRelationListByFlowNodes(List taskNodeList) { - List nodeRelationList = new ArrayList<>(); - for (TaskNode taskNode : taskNodeList) { - String preTasks = taskNode.getPreTasks(); - List preTaskList = JSONUtils.toList(preTasks, String.class); - if (preTaskList != null) { - for (String depNodeName : preTaskList) { - if (null != findNodeByName(taskNodeList, depNodeName)) { - nodeRelationList.add(new TaskNodeRelation(depNodeName, taskNode.getName())); - } - } - } - } - return nodeRelationList; - } - - /** - * generate task nodes needed by dag - * 生成dag需要的task nodes - * - * @param taskNodeList - * @param taskDependType - * @return - */ - private static List generateFlowNodeListByStartNode(List taskNodeList, List startNodeNameList, - List recoveryNodeNameList, TaskDependType taskDependType) { - List destFlowNodeList = new 
ArrayList<>(); - List startNodeList = startNodeNameList; - - if(taskDependType != TaskDependType.TASK_POST - && startNodeList.size() == 0){ - logger.error("start node list is empty! cannot continue run the process "); - return destFlowNodeList; - } - List destTaskNodeList = new ArrayList<>(); - List tmpTaskNodeList = new ArrayList<>(); - if (taskDependType == TaskDependType.TASK_POST - && recoveryNodeNameList.size() > 0) { - startNodeList = recoveryNodeNameList; - } - if (startNodeList == null || startNodeList.size() == 0) { - // 没有特殊的指定start nodes - tmpTaskNodeList = taskNodeList; - } else { - // 指定了start nodes or 恢复执行 - for (String startNodeName : startNodeList) { - TaskNode startNode = findNodeByName(taskNodeList, startNodeName); - List childNodeList = new ArrayList<>(); - if (TaskDependType.TASK_POST == taskDependType) { - childNodeList = getFlowNodeListPost(startNode, taskNodeList); - } else if (TaskDependType.TASK_PRE == taskDependType) { - childNodeList = getFlowNodeListPre(startNode, recoveryNodeNameList, taskNodeList); - } else { - childNodeList.add(startNode); - } - tmpTaskNodeList.addAll(childNodeList); - } - } - - for (TaskNode taskNode : tmpTaskNodeList) { - if (null == findNodeByName(destTaskNodeList, taskNode.getName())) { - destTaskNodeList.add(taskNode); - } - } - return destTaskNodeList; - } - - - /** - * find all the nodes that depended on the start node - * 找到所有依赖start node的node - * - * @param startNode - * @param taskNodeList - * @return - */ - private static List getFlowNodeListPost(TaskNode startNode, List taskNodeList) { - List resultList = new ArrayList<>(); - for (TaskNode taskNode : taskNodeList) { - List depList = taskNode.getDepList(); - if (depList != null) { - if (depList.contains(startNode.getName())) { - resultList.addAll(getFlowNodeListPost(taskNode, taskNodeList)); - } - } - - } - resultList.add(startNode); - return resultList; - } - - /** - * find all nodes that start nodes depend on. 
- * 找到所有start node依赖的node - * - * @param startNode - * @param taskNodeList - * @return - */ - private static List getFlowNodeListPre(TaskNode startNode, List recoveryNodeNameList, List taskNodeList) { - - List resultList = new ArrayList<>(); - - List depList = startNode.getDepList(); - resultList.add(startNode); - if (depList == null || depList.size() == 0) { - return resultList; - } - for (String depNodeName : depList) { - TaskNode start = findNodeByName(taskNodeList, depNodeName); - if (recoveryNodeNameList.contains(depNodeName)) { - resultList.add(start); - } else { - resultList.addAll(getFlowNodeListPre(start, recoveryNodeNameList, taskNodeList)); - } - } - return resultList; - } - - /** - * generate dag by start nodes and recovery nodes - * 根据start nodes 和 recovery nodes 生成dag - * @param processDefinitionJson - * @param startNodeNameList - * @param recoveryNodeNameList - * @param depNodeType - * @return - * @throws Exception - */ - public static ProcessDag generateFlowDag(String processDefinitionJson, - List startNodeNameList, - List recoveryNodeNameList, - TaskDependType depNodeType) throws Exception { - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - - List taskNodeList = processData.getTasks(); - List destTaskNodeList = generateFlowNodeListByStartNode(taskNodeList, startNodeNameList, recoveryNodeNameList, depNodeType); - if (destTaskNodeList.isEmpty()) { - return null; - } - List taskNodeRelations = generateRelationListByFlowNodes(destTaskNodeList); - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(destTaskNodeList); - return processDag; - } - - /** - * parse the forbidden task nodes in process definition. 
- * @param processDefinitionJson - * @return - */ - public static Map getForbiddenTaskNodeMaps(String processDefinitionJson){ - Map forbidTaskNodeMap = new ConcurrentHashMap<>(); - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - - List taskNodeList = processData.getTasks(); - for(TaskNode node : taskNodeList){ - if(node.isForbidden()){ - forbidTaskNodeMap.putIfAbsent(node.getName(), node); - } - } - return forbidTaskNodeMap; - } - - - /** - * find node by node name - * 通过 name 获取节点 - * @param nodeDetails - * @param nodeName - * @return - * @see TaskNode - */ - public static TaskNode findNodeByName(List nodeDetails, String nodeName) { - for (TaskNode taskNode : nodeDetails) { - if (taskNode.getName().equals(nodeName)) { - return taskNode; - } - } - return null; - } - - - /** - * get start vertex in one dag - * it would find the post node if the start vertex is forbidden running - * @param parentNodeName the previous node - * @param dag - * @param completeTaskList - * @return - */ - public static Collection getStartVertex(String parentNodeName, DAG dag, - Map completeTaskList){ - - if(completeTaskList == null){ - completeTaskList = new HashMap<>(); - } - Collection startVertexs = null; - if(StringUtils.isNotEmpty(parentNodeName)){ - startVertexs = dag.getSubsequentNodes(parentNodeName); - }else{ - startVertexs = dag.getBeginNode(); - } - - List tmpStartVertexs = new ArrayList<>(); - if(startVertexs!= null){ - tmpStartVertexs.addAll(startVertexs); - } - - for(String start : startVertexs){ - TaskNode startNode = dag.getNode(start); - if(!startNode.isForbidden() && !completeTaskList.containsKey(start)){ - continue; - } - Collection postNodes = getStartVertex(start, dag, completeTaskList); - - for(String post : postNodes){ - if(checkForbiddenPostCanSubmit(post, dag)){ - tmpStartVertexs.add(post); - } - } - tmpStartVertexs.remove(start); - } - - return tmpStartVertexs; - } - - /** - * - * @param postNodeName - * @param dag - * 
@return - */ - private static boolean checkForbiddenPostCanSubmit(String postNodeName, DAG dag){ - - TaskNode postNode = dag.getNode(postNodeName); - List dependList = postNode.getDepList(); - - for(String dependNodeName : dependList){ - TaskNode dependNode = dag.getNode(dependNodeName); - if(!dependNode.isForbidden()){ - return false; - } - } - return true; - } - - - - /*** - * generate dag graph - * @param processDag - * @return - */ - public static DAG buildDagGraph(ProcessDag processDag) { - - DAG dag = new DAG<>(); - - /** - * add vertex - */ - if (CollectionUtils.isNotEmpty(processDag.getNodes())){ - for (TaskNode node : processDag.getNodes()){ - dag.addNode(node.getName(),node); - } - } - - /** - * add edge - */ - if (CollectionUtils.isNotEmpty(processDag.getEdges())){ - for (TaskNodeRelation edge : processDag.getEdges()){ - dag.addEdge(edge.getStartNode(),edge.getEndNode()); - } - } - return dag; - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/PropertyUtils.java b/escheduler-dao/src/main/java/cn/escheduler/dao/utils/PropertyUtils.java deleted file mode 100644 index e91dcedb2c..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/PropertyUtils.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.utils; - -import cn.escheduler.common.Constants; -import com.baomidou.mybatisplus.core.toolkit.IOUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Properties; - - -/** - * property utils - * single instance - */ -public class PropertyUtils { - - /** - * logger - */ - private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class); - - private static final Properties properties = new Properties(); - - private static final PropertyUtils propertyUtils = new PropertyUtils(); - - private PropertyUtils(){ - init(); - } - - private void init(){ - String[] propertyFiles = new String[]{Constants.DAO_PROPERTIES_PATH}; - for (String fileName : propertyFiles) { - InputStream fis = null; - try { - fis = PropertyUtils.class.getResourceAsStream(fileName); - properties.load(fis); - - } catch (IOException e) { - logger.error(e.getMessage(), e); - System.exit(1); - } finally { - IOUtils.closeQuietly(fis); - } - } - } - -/* - public static PropertyUtils getInstance(){ - return propertyUtils; - } -*/ - - /** - * get property value - * - * @param key property name - * @return - */ - public static String getString(String key) { - return properties.getProperty(key); - } - - /** - * get property value - * - * @param key property name - * @return get property int value , if key == null, then return -1 - */ - public static int getInt(String key) { - return getInt(key, -1); - } - - /** - * - * @param key - * @param defaultValue - * @return - */ - public static int getInt(String key, int defaultValue) { - String value = getString(key); - if (value == null) { - return defaultValue; - } - - try { - return Integer.parseInt(value); - } catch (NumberFormatException e) { - logger.info(e.getMessage(),e); - } - return defaultValue; - } - - /** - * get 
property value - * - * @param key property name - * @return - */ - public static Boolean getBoolean(String key) { - String value = properties.getProperty(key.trim()); - if(null != value){ - return Boolean.parseBoolean(value); - } - - return null; - } - - /** - * - * @param key - * @return - */ - public static long getLong(String key) { - return getLong(key,-1); - } - - /** - * - * @param key - * @param defaultVal - * @return - */ - public static long getLong(String key, long defaultVal) { - String val = getString(key); - return val == null ? defaultVal : Long.parseLong(val); - } - - - /** - * - * @param key - * @param defaultVal - * @return - */ - public double getDouble(String key, double defaultVal) { - String val = getString(key); - return val == null ? defaultVal : Double.parseDouble(val); - } - - - /** - * get array - * @param key property name - * @param splitStr separator - * @return - */ - public static String[] getArray(String key, String splitStr) { - String value = getString(key); - if (value == null) { - return null; - } - try { - String[] propertyArray = value.split(splitStr); - return propertyArray; - } catch (NumberFormatException e) { - logger.info(e.getMessage(),e); - } - return null; - } - - /** - * - * @param key - * @param type - * @param defaultValue - * @param - * @return get enum value - */ - public > T getEnum(String key, Class type, - T defaultValue) { - String val = getString(key); - return val == null ? defaultValue : Enum.valueOf(type, val); - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/AbstractCycle.java b/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/AbstractCycle.java deleted file mode 100644 index 3d75256737..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/AbstractCycle.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.utils.cron; - -import cn.escheduler.common.enums.CycleEnum; -import com.cronutils.model.Cron; -import com.cronutils.model.field.CronField; -import com.cronutils.model.field.CronFieldName; -import com.cronutils.model.field.expression.*; - -/** - * Cycle - */ -public abstract class AbstractCycle { - - protected Cron cron; - - protected CronField minField; - protected CronField hourField; - protected CronField dayOfMonthField; - protected CronField dayOfWeekField; - protected CronField monthField; - protected CronField yearField; - - public CycleLinks addCycle(AbstractCycle cycle) { - return new CycleLinks(this.cron).addCycle(this).addCycle(cycle); - } - - public AbstractCycle(Cron cron) { - if (cron == null) { - throw new IllegalArgumentException("cron must not be null!"); - } - - this.cron = cron; - this.minField = cron.retrieve(CronFieldName.MINUTE); - this.hourField = cron.retrieve(CronFieldName.HOUR); - this.dayOfMonthField = cron.retrieve(CronFieldName.DAY_OF_MONTH); - this.dayOfWeekField = cron.retrieve(CronFieldName.DAY_OF_WEEK); - this.monthField = cron.retrieve(CronFieldName.MONTH); - this.yearField = cron.retrieve(CronFieldName.YEAR); - } - - /** - * Whether the minute field has a value - * @return - */ - protected boolean minFiledIsSetAll(){ 
- FieldExpression minFieldExpression = minField.getExpression(); - return (minFieldExpression instanceof Every || minFieldExpression instanceof Always - || minFieldExpression instanceof Between || minFieldExpression instanceof And - || minFieldExpression instanceof On); - } - - - /** - * Whether the minute field has a value of every or always - * @return - */ - protected boolean minFiledIsEvery(){ - FieldExpression minFieldExpression = minField.getExpression(); - return (minFieldExpression instanceof Every || minFieldExpression instanceof Always); - } - - /** - * Whether the hour field has a value - * @return - */ - protected boolean hourFiledIsSetAll(){ - FieldExpression hourFieldExpression = hourField.getExpression(); - return (hourFieldExpression instanceof Every || hourFieldExpression instanceof Always - || hourFieldExpression instanceof Between || hourFieldExpression instanceof And - || hourFieldExpression instanceof On); - } - - /** - * Whether the hour field has a value of every or always - * @return - */ - protected boolean hourFiledIsEvery(){ - FieldExpression hourFieldExpression = hourField.getExpression(); - return (hourFieldExpression instanceof Every || hourFieldExpression instanceof Always); - } - - /** - * Whether the day Of month field has a value - * @return - */ - protected boolean dayOfMonthFieldIsSetAll(){ - return (dayOfMonthField.getExpression() instanceof Every || dayOfMonthField.getExpression() instanceof Always - || dayOfMonthField.getExpression() instanceof Between || dayOfMonthField.getExpression() instanceof And - || dayOfMonthField.getExpression() instanceof On); - } - - - /** - * Whether the day Of Month field has a value of every or always - * @return - */ - protected boolean dayOfMonthFieldIsEvery(){ - return (dayOfMonthField.getExpression() instanceof Every || dayOfMonthField.getExpression() instanceof Always); - } - - /** - * Whether month field has a value - * @return - */ - protected boolean monthFieldIsSetAll(){ - 
FieldExpression monthFieldExpression = monthField.getExpression(); - return (monthFieldExpression instanceof Every || monthFieldExpression instanceof Always - || monthFieldExpression instanceof Between || monthFieldExpression instanceof And - || monthFieldExpression instanceof On); - } - - /** - * Whether the month field has a value of every or always - * @return - */ - protected boolean monthFieldIsEvery(){ - FieldExpression monthFieldExpression = monthField.getExpression(); - return (monthFieldExpression instanceof Every || monthFieldExpression instanceof Always); - } - - /** - * Whether the day Of week field has a value - * @return - */ - protected boolean dayofWeekFieldIsSetAll(){ - FieldExpression dayOfWeekFieldExpression = dayOfWeekField.getExpression(); - return (dayOfWeekFieldExpression instanceof Every || dayOfWeekFieldExpression instanceof Always - || dayOfWeekFieldExpression instanceof Between || dayOfWeekFieldExpression instanceof And - || dayOfWeekFieldExpression instanceof On); - } - - /** - * Whether the day Of week field has a value of every or always - * @return - */ - protected boolean dayofWeekFieldIsEvery(){ - FieldExpression dayOfWeekFieldExpression = dayOfWeekField.getExpression(); - return (dayOfWeekFieldExpression instanceof Every || dayOfWeekFieldExpression instanceof Always); - } - /** - * get cycle enum - * - * @return - */ - protected abstract CycleEnum getCycle(); - - /** - * get mini level cycle enum - * - * @return - */ - protected abstract CycleEnum getMiniCycle(); -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CronUtils.java b/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CronUtils.java deleted file mode 100644 index 7579ac15a0..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CronUtils.java +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.utils.cron; - - -import cn.escheduler.common.enums.CycleEnum; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.common.utils.DateUtils; -import com.cronutils.model.Cron; -import com.cronutils.model.definition.CronDefinitionBuilder; -import com.cronutils.parser.CronParser; -import org.quartz.CronExpression; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.text.ParseException; -import java.util.*; - -import static cn.escheduler.dao.utils.cron.CycleFactory.*; -import static com.cronutils.model.CronType.QUARTZ; - - -/** - * cron utils - */ -public class CronUtils { - - private static final Logger logger = LoggerFactory.getLogger(CronUtils.class); - - - private static final CronParser QUARTZ_CRON_PARSER = new CronParser(CronDefinitionBuilder.instanceDefinitionFor(QUARTZ)); - - /** - * Parse string with cron expression to Cron - * - * @param cronExpression - * - cron expression, never null - * @return Cron instance, corresponding to cron expression received - * @throws java.lang.IllegalArgumentException - * if expression does not match cron definition - */ - public static Cron parse2Cron(String cronExpression) { - return QUARTZ_CRON_PARSER.parse(cronExpression); - } - - /** - * build a new CronExpression based on 
the string cronExpression. - * - * @param cronExpression String representation of the cron expression the - * new object should represent - * @throws java.text.ParseException - * if the string expression cannot be parsed into a valid - * CronExpression - */ - public static CronExpression parse2CronExpression(String cronExpression) throws ParseException { - return new CronExpression(cronExpression); - } - - /** - * get cycle enum - * @param cron - * @return - */ - public static CycleEnum getMaxCycle(Cron cron) { - return min(cron).addCycle(hour(cron)).addCycle(day(cron)).addCycle(week(cron)).addCycle(month(cron)).getCycle(); - } - - /** - * get cycle enum - * @param cron - * @return - */ - public static CycleEnum getMiniCycle(Cron cron) { - return min(cron).addCycle(hour(cron)).addCycle(day(cron)).addCycle(week(cron)).addCycle(month(cron)).getMiniCycle(); - } - - /** - * get mini level of cycle enum - * - * @param crontab - * @return - */ - public static CycleEnum getMiniCycle(String crontab) { - return getMiniCycle(parse2Cron(crontab)); - } - - /** - * get cycle enum - * - * @param crontab - * @return - */ - public static CycleEnum getMaxCycle(String crontab) { - return getMaxCycle(parse2Cron(crontab)); - } - - /** - * gets all scheduled times for a period of time based on not self dependency - * @param startTime - * @param endTime - * @param cronExpression - * @return - */ - public static List getFireDateList(Date startTime, Date endTime, CronExpression cronExpression) { - List dateList = new ArrayList<>(); - - while (Stopper.isRunning()) { - startTime = cronExpression.getNextValidTimeAfter(startTime); - if (startTime.after(endTime)) { - break; - } - dateList.add(startTime); - } - - return dateList; - } - - /** - * gets all scheduled times for a period of time based on self dependency - * @param startTime - * @param endTime - * @param cronExpression - * @return - */ - public static List getSelfFireDateList(Date startTime, Date endTime, CronExpression 
cronExpression) { - List dateList = new ArrayList<>(); - - while (Stopper.isRunning()) { - startTime = cronExpression.getNextValidTimeAfter(startTime); - if (startTime.after(endTime) || startTime.equals(endTime)) { - break; - } - dateList.add(startTime); - } - - return dateList; - } - - - /** - * get expiration time - * @param startTime - * @param cycleEnum - * @return - */ - public static Date getExpirationTime(Date startTime, CycleEnum cycleEnum) { - Date maxExpirationTime = null; - Date startTimeMax = null; - try { - startTimeMax = getEndTime(startTime); - - Calendar calendar = Calendar.getInstance(); - calendar.setTime(startTime); - switch (cycleEnum) { - case HOUR: - calendar.add(Calendar.HOUR, 1); - break; - case DAY: - calendar.add(Calendar.DATE, 1); - break; - case WEEK: - calendar.add(Calendar.DATE, 1); - break; - case MONTH: - calendar.add(Calendar.DATE, 1); - break; - default: - logger.error("Dependent process definition's cycleEnum is {},not support!!", cycleEnum.name()); - break; - } - maxExpirationTime = calendar.getTime(); - } catch (Exception e) { - logger.error(e.getMessage(),e); - } - return DateUtils.compare(startTimeMax,maxExpirationTime)?maxExpirationTime:startTimeMax; - } - - /** - * get the end time of the day by value of date - * @param date - * @return - */ - private static Date getEndTime(Date date) { - Calendar end = new GregorianCalendar(); - end.setTime(date); - end.set(Calendar.HOUR_OF_DAY,23); - end.set(Calendar.MINUTE,59); - end.set(Calendar.SECOND,59); - end.set(Calendar.MILLISECOND,999); - return end.getTime(); - } - -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CycleFactory.java b/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CycleFactory.java deleted file mode 100644 index fabf289b96..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CycleFactory.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * 
contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.utils.cron; - -import cn.escheduler.common.enums.CycleEnum; -import com.cronutils.model.Cron; -import com.cronutils.model.field.expression.Always; -import com.cronutils.model.field.expression.QuestionMark; - -/** - * Crontab Cycle Tool Factory - */ -public class CycleFactory { - - public static AbstractCycle min(Cron cron) { - return new MinCycle(cron); - } - - public static AbstractCycle hour(Cron cron) { - return new HourCycle(cron); - } - - public static AbstractCycle day(Cron cron) { - return new DayCycle(cron); - } - - public static AbstractCycle week(Cron cron) { - return new WeekCycle(cron); - } - - public static AbstractCycle month(Cron cron) { - return new MonthCycle(cron); - } - - /** - * day cycle - */ - public static class DayCycle extends AbstractCycle { - - public DayCycle(Cron cron) { - super(cron); - } - - @Override - protected CycleEnum getCycle() { - - if (minFiledIsSetAll() - && hourFiledIsSetAll() - && dayOfMonthFieldIsEvery() - && dayOfWeekField.getExpression() instanceof QuestionMark - && monthField.getExpression() instanceof Always) { - return CycleEnum.DAY; - } - - return null; - } - - @Override - protected CycleEnum getMiniCycle() { - if (dayOfMonthFieldIsEvery()) { - return CycleEnum.DAY; - } - 
- return null; - } - } - - /** - * hour cycle - */ - public static class HourCycle extends AbstractCycle { - - public HourCycle(Cron cron) { - super(cron); - } - - @Override - protected CycleEnum getCycle() { - if (minFiledIsSetAll() - && hourFiledIsEvery() - && dayOfMonthField.getExpression() instanceof Always - && dayOfWeekField.getExpression() instanceof QuestionMark - && monthField.getExpression() instanceof Always) { - return CycleEnum.HOUR; - } - - return null; - } - - @Override - protected CycleEnum getMiniCycle() { - if(hourFiledIsEvery()){ - return CycleEnum.HOUR; - } - return null; - } - } - - /** - * minute cycle - */ - public static class MinCycle extends AbstractCycle { - - public MinCycle(Cron cron) { - super(cron); - } - - @Override - protected CycleEnum getCycle() { - if (minFiledIsEvery() - && hourField.getExpression() instanceof Always - && dayOfMonthField.getExpression() instanceof Always - && monthField.getExpression() instanceof Always) { - return CycleEnum.MINUTE; - } - - return null; - } - - @Override - protected CycleEnum getMiniCycle() { - if(minFiledIsEvery()){ - return CycleEnum.MINUTE; - } - return null; - } - } - - /** - * month cycle - */ - public static class MonthCycle extends AbstractCycle { - - public MonthCycle(Cron cron) { - super(cron); - } - - @Override - protected CycleEnum getCycle() { - boolean flag = (minFiledIsSetAll() - && hourFiledIsSetAll() - && dayOfMonthFieldIsSetAll() - && dayOfWeekField.getExpression() instanceof QuestionMark - && monthFieldIsEvery()) || - (minFiledIsSetAll() - && hourFiledIsSetAll() - && dayOfMonthField.getExpression() instanceof QuestionMark - && dayofWeekFieldIsSetAll() - && monthFieldIsEvery()); - if (flag) { - return CycleEnum.MONTH; - } - - return null; - } - - @Override - protected CycleEnum getMiniCycle() { - if (monthFieldIsEvery()) { - return CycleEnum.MONTH; - } - - return null; - } - } - - /** - * week cycle - */ - public static class WeekCycle extends AbstractCycle { - public 
WeekCycle(Cron cron) { - super(cron); - } - - @Override - protected CycleEnum getCycle() { - if (minFiledIsSetAll() - && hourFiledIsSetAll() - && dayOfMonthField.getExpression() instanceof QuestionMark - && dayofWeekFieldIsEvery() - && monthField.getExpression() instanceof Always) { - return CycleEnum.WEEK; - } - - return null; - } - - @Override - protected CycleEnum getMiniCycle() { - if (dayofWeekFieldIsEvery()) { - return CycleEnum.WEEK; - } - - return null; - } - } -} diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CycleLinks.java b/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CycleLinks.java deleted file mode 100644 index 3447e267ca..0000000000 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/utils/cron/CycleLinks.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.utils.cron; - -import cn.escheduler.common.enums.CycleEnum; -import com.cronutils.model.Cron; - -import java.util.ArrayList; -import java.util.List; - -/** - * 链接判断工具 - */ -public class CycleLinks extends AbstractCycle { - private final List cycleList = new ArrayList<>(); - - public CycleLinks(Cron cron) { - super(cron); - } - - @Override - public CycleLinks addCycle(AbstractCycle cycle) { - cycleList.add(cycle); - return this; - } - - @Override - protected CycleEnum getCycle() { - for (AbstractCycle abstractCycle : cycleList) { - CycleEnum cycle = abstractCycle.getCycle(); - if (cycle != null) { - return cycle; - } - } - - return null; - } - - @Override - protected CycleEnum getMiniCycle() { - for (AbstractCycle cycleHelper : cycleList) { - CycleEnum cycle = cycleHelper.getMiniCycle(); - if (cycle != null) { - return cycle; - } - } - - return null; - } -} \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AccessTokenMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AccessTokenMapper.xml deleted file mode 100644 index a9f0c59ba1..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AccessTokenMapper.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertGroupMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertGroupMapper.xml deleted file mode 100644 index 2a86c9360d..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertGroupMapper.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertMapper.xml deleted file mode 100644 index 5211ae0751..0000000000 --- 
a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/AlertMapper.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/CommandMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/CommandMapper.xml deleted file mode 100644 index fdc8aecb5b..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/CommandMapper.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceMapper.xml deleted file mode 100644 index 177b054ad0..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceMapper.xml +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceUserMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceUserMapper.xml deleted file mode 100644 index b833a1fbdc..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/DataSourceUserMapper.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - - delete from t_escheduler_datasource_user - where user_id = #{userId} - - - - delete from t_escheduler_datasource_user - where datasource_id = #{datasourceId} - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ErrorCommandMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ErrorCommandMapper.xml deleted file mode 100644 index 70cbefc65a..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ErrorCommandMapper.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessDefinitionMapper.xml 
b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessDefinitionMapper.xml deleted file mode 100644 index 5c4237115b..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessDefinitionMapper.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapMapper.xml deleted file mode 100644 index ff85f35ed7..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapMapper.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - - - delete - from t_escheduler_relation_process_instance - where parent_process_instance_id=#{parentProcessId} - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapper.xml deleted file mode 100644 index c92cc030a9..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProcessInstanceMapper.xml +++ /dev/null @@ -1,123 +0,0 @@ - - - - - - - - update t_escheduler_process_instance - set host=null - where host =#{host} and state in - - #{i} - - - - update t_escheduler_process_instance - set state = #{destState} - where state = #{originState} - - - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectMapper.xml deleted file mode 100644 index 49c71230ec..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectMapper.xml +++ /dev/null @@ -1,65 +0,0 @@ - - - - - - - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectUserMapper.xml 
b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectUserMapper.xml deleted file mode 100644 index 0d9ba3bac5..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ProjectUserMapper.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - - delete from t_escheduler_relation_project_user - where 1=1 - and user_id = #{userId} - - and project_id = #{projectId} - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/QueueMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/QueueMapper.xml deleted file mode 100644 index 6b4a391a24..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/QueueMapper.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceMapper.xml deleted file mode 100644 index 643f9856f2..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceMapper.xml +++ /dev/null @@ -1,57 +0,0 @@ - - - - - - - - - - diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceUserMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceUserMapper.xml deleted file mode 100644 index d6c3323993..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ResourceUserMapper.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - delete - from t_escheduler_relation_resources_user - where 1 = 1 - - and user_id = #{userId} - - - and resources_id = #{resourceId} - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ScheduleMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ScheduleMapper.xml deleted file mode 100644 index 76b321aac6..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/ScheduleMapper.xml +++ 
/dev/null @@ -1,41 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/SessionMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/SessionMapper.xml deleted file mode 100644 index 530d0e984c..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/SessionMapper.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TaskInstanceMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TaskInstanceMapper.xml deleted file mode 100644 index 91260d43dc..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TaskInstanceMapper.xml +++ /dev/null @@ -1,96 +0,0 @@ - - - - - update t_escheduler_task_instance - set state = #{destStatus} - where host = #{host} and state in (#{states}) - - - - - - - - - - diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TenantMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TenantMapper.xml deleted file mode 100644 index b975f7a7e1..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/TenantMapper.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UDFUserMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UDFUserMapper.xml deleted file mode 100644 index a34df12ee9..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UDFUserMapper.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - delete from t_escheduler_relation_udfs_user - where `user_id` = #{userId} - - - delete from t_escheduler_relation_udfs_user - where `udf_id` = #{udfFuncId} - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UdfFuncMapper.xml 
b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UdfFuncMapper.xml deleted file mode 100644 index acde3ef7be..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UdfFuncMapper.xml +++ /dev/null @@ -1,51 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserAlertGroupMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserAlertGroupMapper.xml deleted file mode 100644 index 1b972028ec..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserAlertGroupMapper.xml +++ /dev/null @@ -1,20 +0,0 @@ - - - - - delete from t_escheduler_relation_user_alertgroup - where `alertgroup_id` = #{alertgroupId} - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserMapper.xml deleted file mode 100644 index d8fecc85e6..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/UserMapper.xml +++ /dev/null @@ -1,55 +0,0 @@ - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/WorkerGroupMapper.xml b/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/WorkerGroupMapper.xml deleted file mode 100644 index 9b82e15cc7..0000000000 --- a/escheduler-dao/src/main/resources/cn.escheduler.dao.mapper/WorkerGroupMapper.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/cron/CronUtilsTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/cron/CronUtilsTest.java deleted file mode 100644 index b9fad850e4..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/cron/CronUtilsTest.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license 
agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.cron; - -import cn.escheduler.common.enums.CycleEnum; -import cn.escheduler.dao.utils.cron.CronUtils; -import com.cronutils.builder.CronBuilder; -import com.cronutils.model.Cron; -import com.cronutils.model.CronType; -import com.cronutils.model.definition.CronDefinitionBuilder; -import com.cronutils.model.field.CronField; -import com.cronutils.model.field.CronFieldName; -import com.cronutils.model.field.expression.*; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.text.ParseException; - -import static com.cronutils.model.field.expression.FieldExpressionFactory.*; - -/** - */ -public class CronUtilsTest { - - private static final Logger logger = LoggerFactory.getLogger(CronUtilsTest.class); - - @Test - public void cronAsStringTest() { - Cron cron = CronBuilder.cron(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ)) - .withYear(always()) - .withDoW(questionMark()) - .withMonth(always()) - .withDoM(always()) - .withHour(always()) - .withMinute(every(5)) - .withSecond(on(0)) - .instance(); - // Obtain the string expression - String cronAsString = cron.asString(); // 0 */5 * * * ? 
* Every five minutes(每5分钟一次) - - Assert.assertEquals(cronAsString, "0 */5 * * * ? *"); - - } - - - @Test - public void testParse() throws ParseException { - String strCrontab = "0 1 2 3 * ? *"; - Cron depCron = CronUtils.parse2Cron(strCrontab); - Assert.assertEquals(depCron.retrieve(CronFieldName.SECOND).getExpression().asString(), "0"); - Assert.assertEquals(depCron.retrieve(CronFieldName.MINUTE).getExpression().asString(), "1"); - Assert.assertEquals(depCron.retrieve(CronFieldName.HOUR).getExpression().asString(), "2"); - Assert.assertEquals(depCron.retrieve(CronFieldName.DAY_OF_MONTH).getExpression().asString(), "3"); - Assert.assertEquals(depCron.retrieve(CronFieldName.MONTH).getExpression().asString(), "*"); - Assert.assertEquals(depCron.retrieve(CronFieldName.YEAR).getExpression().asString(), "*"); - } - - @Test - public void testParse1() throws ParseException { - String strCrontab = "* * 0/1 * * ? *"; - strCrontab = "0/50 0/59 * * * ? *"; - strCrontab = "3/5 * 0/5 * * ? *"; - strCrontab = "1/5 3/5 1/5 3/30 * ? *"; - Cron depCron = CronUtils.parse2Cron(strCrontab); - logger.info(depCron.validate().asString()); - } - - @Test - public void scheduleTypeTest() throws ParseException { - - CycleEnum cycleEnum = CronUtils.getMaxCycle("0 */1 * * * ? *"); - Assert.assertEquals(cycleEnum.name(), "MINUTE"); - - CycleEnum cycleEnum2 = CronUtils.getMaxCycle("0 * * * * ? *"); - Assert.assertEquals(cycleEnum2.name(), "MINUTE"); - } - - @Test - public void test2(){ - Cron cron1 = CronBuilder.cron(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ)) - .withYear(always()) - .withDoW(questionMark()) - .withMonth(always()) - .withDoM(always()) - .withHour(always()) - .withMinute(every(5)) - .withSecond(on(0)) - .instance(); - - String cronAsString = cron1.asString(); // 0 */5 * * * ? * 每5分钟一次 - //logger.info(cronAsString); - // Obtain the string expression - //String minCrontab = "0 0 * * * ? 
*"; - //String minCrontab = "0 0 10,14,16 * * ?"; - //String minCrontab = "0 0-5 14 * * ? *"; - //String minCrontab = "0 0 2 ? * SUN *"; - //String minCrontab = "* 0,3 2 SUN * 1#1 *"; - //String minCrontab = "* 0,3 * 1W * ? *"; - //cron = CronUtils.parse2Cron("0 * * * * ? *"); - // 月份周期 - /*String[] cronArayy = new String[]{"* 0,3 * 1W * ? *","* 0 0 1W * ? *", - "0 0 0 L 3/5 ? *","0 0 0 ? 3/5 2/2 *"};*/ - // 分钟周期 - String[] cronArayy = new String[]{"* * * * * ? *","* 0 * * * ? *", - "* 5 * * 3/5 ? *","0 0 * * * ? *"}; - // 周周期 - /*String[] cronArayy = new String[]{"* * * ? * 2/1 *","0 *//*5 * ? * 2/1 *", - "* * *//*5 ? * 2/1 *"};*/ - for(String minCrontab:cronArayy){ - if (!org.quartz.CronExpression.isValidExpression(minCrontab)) { - throw new RuntimeException(minCrontab+" verify failure, cron expression not valid"); - } - Cron cron = CronUtils.parse2Cron(minCrontab); - CronField minField = cron.retrieve(CronFieldName.MINUTE); - logger.info("minField instanceof Between:"+(minField.getExpression() instanceof Between)); - logger.info("minField instanceof Every:"+(minField.getExpression() instanceof Every)); - logger.info("minField instanceof Always:" + (minField.getExpression() instanceof Always)); - logger.info("minField instanceof On:"+(minField.getExpression() instanceof On)); - logger.info("minField instanceof And:"+(minField.getExpression() instanceof And)); - CronField hourField = cron.retrieve(CronFieldName.HOUR); - logger.info("hourField instanceof Between:"+(hourField.getExpression() instanceof Between)); - logger.info("hourField instanceof Always:"+(hourField.getExpression() instanceof Always)); - logger.info("hourField instanceof Every:"+(hourField.getExpression() instanceof Every)); - logger.info("hourField instanceof On:"+(hourField.getExpression() instanceof On)); - logger.info("hourField instanceof And:"+(hourField.getExpression() instanceof And)); - - CronField dayOfMonthField = cron.retrieve(CronFieldName.DAY_OF_MONTH); - logger.info("dayOfMonthField 
instanceof Between:"+(dayOfMonthField.getExpression() instanceof Between)); - logger.info("dayOfMonthField instanceof Always:"+(dayOfMonthField.getExpression() instanceof Always)); - logger.info("dayOfMonthField instanceof Every:"+(dayOfMonthField.getExpression() instanceof Every)); - logger.info("dayOfMonthField instanceof On:"+(dayOfMonthField.getExpression() instanceof On)); - logger.info("dayOfMonthField instanceof And:"+(dayOfMonthField.getExpression() instanceof And)); - logger.info("dayOfMonthField instanceof QuestionMark:"+(dayOfMonthField.getExpression() instanceof QuestionMark)); - - CronField monthField = cron.retrieve(CronFieldName.MONTH); - logger.info("monthField instanceof Between:"+(monthField.getExpression() instanceof Between)); - logger.info("monthField instanceof Always:"+(monthField.getExpression() instanceof Always)); - logger.info("monthField instanceof Every:"+(monthField.getExpression() instanceof Every)); - logger.info("monthField instanceof On:"+(monthField.getExpression() instanceof On)); - logger.info("monthField instanceof And:"+(monthField.getExpression() instanceof And)); - logger.info("monthField instanceof QuestionMark:"+(monthField.getExpression() instanceof QuestionMark)); - - CronField dayOfWeekField = cron.retrieve(CronFieldName.DAY_OF_WEEK); - logger.info("dayOfWeekField instanceof Between:"+(dayOfWeekField.getExpression() instanceof Between)); - logger.info("dayOfWeekField instanceof Always:"+(dayOfWeekField.getExpression() instanceof Always)); - logger.info("dayOfWeekField instanceof Every:"+(dayOfWeekField.getExpression() instanceof Every)); - logger.info("dayOfWeekField instanceof On:"+(dayOfWeekField.getExpression() instanceof On)); - logger.info("dayOfWeekField instanceof And:"+(dayOfWeekField.getExpression() instanceof And)); - logger.info("dayOfWeekField instanceof QuestionMark:"+(dayOfWeekField.getExpression() instanceof QuestionMark)); - - CronField yearField = cron.retrieve(CronFieldName.YEAR); - - //CycleEnum 
cycleEnum = CronUtils.getMaxCycle("0 * * * * ? *"); - CycleEnum cycleEnum = CronUtils.getMaxCycle(minCrontab); - if(cycleEnum !=null){ - logger.info(cycleEnum.name()); - }else{ - logger.info("can't get scheduleType"); - } - } - - - - } -} diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AccessTokenMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AccessTokenMapperTest.java deleted file mode 100644 index 078288b5a6..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AccessTokenMapperTest.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.dao.entity.AccessToken; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import javax.annotation.Resource; -import java.util.Date; -import java.util.List; - - -@RunWith(SpringRunner.class) -@SpringBootTest -public class AccessTokenMapperTest { - - - @Resource - AccessTokenMapper accessTokenMapper; - - - private AccessToken insertOne(){ - //insertOne - AccessToken accessToken = new AccessToken(); - accessToken.setUserId(4); - accessToken.setToken("hello, access token"); - accessToken.setCreateTime(new Date()); - accessToken.setUpdateTime(new Date()); - accessToken.setExpireTime(new Date()); - accessTokenMapper.insert(accessToken); - return accessToken; - } - - @Test - public void testUpdate(){ - //insertOne - AccessToken accessToken = insertOne(); - //update - accessToken.setToken("hello, token"); - int update = accessTokenMapper.updateById(accessToken); - Assert.assertEquals(update, 1); - accessTokenMapper.deleteById(accessToken.getId()); - } - - @Test - public void testDelete(){ - - AccessToken accessToken = insertOne(); - int delete = accessTokenMapper.deleteById(accessToken.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery(){ - - AccessToken accessToken = insertOne(); - //query - List token = accessTokenMapper.selectList(null); - Assert.assertNotEquals(token.size(), 0); - accessTokenMapper.deleteById(accessToken.getId()); - } - - @Test - public void selectAccessTokenPage() { - AccessToken accessToken = insertOne(); - Page page = new Page(1, 3); - String userName = ""; - IPage accessTokenPage = accessTokenMapper.selectAccessTokenPage(page, userName, 4); - 
Assert.assertNotEquals(accessTokenPage.getTotal(), 0); - accessTokenMapper.deleteById(accessToken.getId()); - } - - -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AlertGroupMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AlertGroupMapperTest.java deleted file mode 100644 index 9f95ce3b70..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AlertGroupMapperTest.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.common.enums.AlertType; -import cn.escheduler.dao.entity.AccessToken; -import cn.escheduler.dao.entity.AlertGroup; -import cn.escheduler.dao.entity.UserAlertGroup; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class AlertGroupMapperTest { - - - @Autowired - AlertGroupMapper alertGroupMapper; - - @Autowired - UserAlertGroupMapper userAlertGroupMapper; - - private AlertGroup insertOne(){ - //insertOne - AlertGroup alertGroup = new AlertGroup(); - alertGroup.setGroupName("alert group 1"); - alertGroup.setDescription("alert test1"); - alertGroup.setGroupType(AlertType.EMAIL); - - alertGroup.setCreateTime(new Date()); - alertGroup.setUpdateTime(new Date()); - alertGroupMapper.insert(alertGroup); - return alertGroup; - } - - @Test - public void testUpdate(){ - //insertOne - AlertGroup alertGroup = insertOne(); - //update - alertGroup.setDescription("hello, ag"); - int update = alertGroupMapper.updateById(alertGroup); - Assert.assertEquals(update, 1); - alertGroupMapper.deleteById(alertGroup.getId()); - } - - @Test - public void testDelete(){ - - AlertGroup alertGroup = insertOne(); - int delete = alertGroupMapper.deleteById(alertGroup.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - AlertGroup alertGroup = insertOne(); - //query - List alertGroups = alertGroupMapper.selectList(null); - Assert.assertNotEquals(alertGroups.size(), 0); - alertGroupMapper.deleteById(alertGroup.getId()); - } - - - @Test - public void 
testQueryAlertGroupPage() { - AlertGroup alertGroup = insertOne(); - Page page = new Page(1, 3); - IPage accessTokenPage = alertGroupMapper.queryAlertGroupPage(page, - "alert" ); - Assert.assertNotEquals(accessTokenPage.getTotal(), 0); - alertGroupMapper.deleteById(alertGroup.getId()); - } - - @Test - public void testQueryByGroupName() { - - AlertGroup alertGroup = insertOne(); - List alertGroups = alertGroupMapper.queryByGroupName("alert group 1"); - Assert.assertNotEquals(alertGroups.size(), 0); - alertGroupMapper.deleteById(alertGroup.getId()); - } - - @Test - public void testQueryByUserId() { - AlertGroup alertGroup = insertOne(); - UserAlertGroup userAlertGroup = new UserAlertGroup(); - userAlertGroup.setAlertgroupId(alertGroup.getId()); - userAlertGroup.setUserId(4); - userAlertGroupMapper.insert(userAlertGroup); - List alertGroups = alertGroupMapper.queryByUserId(4); - Assert.assertNotEquals(alertGroups.size(), 0); - alertGroupMapper.deleteById(alertGroup.getId()); - userAlertGroupMapper.deleteById(userAlertGroup.getId()); - } - - @Test - public void testQueryByAlertType() { - AlertGroup alertGroup = insertOne(); - List alertGroups = alertGroupMapper.queryByAlertType(AlertType.EMAIL); - Assert.assertNotEquals(alertGroups.size(), 0); - alertGroupMapper.deleteById(alertGroup.getId()); - } - - @Test - public void testQueryAllGroupList() { - AlertGroup alertGroup = insertOne(); - List alertGroups = alertGroupMapper.queryAllGroupList(); - Assert.assertNotEquals(alertGroups.size(), 0); - alertGroupMapper.deleteById(alertGroup.getId()); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AlertMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AlertMapperTest.java deleted file mode 100644 index c1a0344c6b..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/AlertMapperTest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or 
more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.common.enums.AlertStatus; -import cn.escheduler.dao.entity.Alert; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - - - -@RunWith(SpringRunner.class) -@SpringBootTest -public class AlertMapperTest { - - @Autowired - AlertMapper alertMapper; - - private Alert insertOne(){ - //insertOne - Alert alert = new Alert(); - alert.setLog("success"); - alert.setReceivers("xx@aa.com"); - alert.setAlertGroupId(1); - alert.setAlertStatus(AlertStatus.EXECUTION_SUCCESS); - alert.setCreateTime(new Date()); - alert.setUpdateTime(new Date()); - alertMapper.insert(alert); - return alert; - } - - @Test - public void testUpdate(){ - //insertOne - Alert alert = insertOne(); - //update - alert.setTitle("hello"); - int update = alertMapper.updateById(alert); - Assert.assertEquals(update, 1); - alertMapper.deleteById(alert.getId()); - } - - @Test - public void testDelete(){ - - Alert alert = insertOne(); - int delete = 
alertMapper.deleteById(alert.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - Alert alert = insertOne(); - //query - List alerts = alertMapper.selectList(null); - Assert.assertNotEquals(alerts.size(), 0); - alertMapper.deleteById(alert.getId()); - } - - @Test - public void testListAlertByStatus() { - Alert alert = insertOne(); - //query - List alerts = alertMapper.listAlertByStatus(AlertStatus.EXECUTION_SUCCESS); - Assert.assertNotEquals(alerts.size(), 0); - alertMapper.deleteById(alert.getId()); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/CommandMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/CommandMapperTest.java deleted file mode 100644 index 625273fa27..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/CommandMapperTest.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.common.enums.*; -import cn.escheduler.dao.entity.Command; -import cn.escheduler.dao.entity.CommandCount; -import cn.escheduler.dao.entity.ProcessDefinition; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class CommandMapperTest { - - - @Autowired - CommandMapper commandMapper; - - @Autowired - ProcessDefinitionMapper processDefinitionMapper; - - private Command insertOne(){ - //insertOne - Command command = new Command(); - command.setCommandType(CommandType.START_PROCESS); - command.setProcessDefinitionId(1); - command.setExecutorId(4); - command.setProcessInstancePriority(Priority.MEDIUM); - command.setFailureStrategy(FailureStrategy.CONTINUE); - command.setWorkerGroupId(-1); - command.setWarningGroupId(1); - command.setUpdateTime(new Date()); - commandMapper.insert(command); - return command; - } - - @Test - public void testUpdate(){ - //insertOne - Command command = insertOne(); - //update - command.setStartTime(new Date()); - int update = commandMapper.updateById(command); - Assert.assertEquals(update, 1); - commandMapper.deleteById(command.getId()); - } - - @Test - public void testDelete(){ - - Command Command = insertOne(); - int delete = commandMapper.deleteById(Command.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - Command command = insertOne(); - //query - List commands = commandMapper.selectList(null); - Assert.assertNotEquals(commands.size(), 0); - commandMapper.deleteById(command.getId()); - } - @Test - public void testGetAll() { - Command command = insertOne(); - List commands = commandMapper.selectList(null); - 
Assert.assertNotEquals(commands.size(), 0); - commandMapper.deleteById(command.getId()); - } - - @Test - public void testGetOneToRun() { - ProcessDefinition processDefinition = new ProcessDefinition(); - processDefinition.setReleaseState(ReleaseState.ONLINE); - processDefinition.setName("ut test"); - processDefinition.setProjectId(1); - processDefinition.setFlag(Flag.YES); - processDefinitionMapper.insert(processDefinition); - - Command command = new Command(); - command.setCommandType(CommandType.START_PROCESS); - command.setProcessDefinitionId(processDefinition.getId()); - command.setExecutorId(4); - command.setProcessInstancePriority(Priority.MEDIUM); - command.setFailureStrategy(FailureStrategy.CONTINUE); - command.setWorkerGroupId(-1); - command.setWarningGroupId(1); - command.setUpdateTime(new Date()); - commandMapper.insert(command); - - Command command2 = commandMapper.getOneToRun(); - Assert.assertNotEquals(command2, null); - commandMapper.deleteById(command.getId()); - processDefinitionMapper.deleteById(processDefinition.getId()); - } - - @Test - public void testCountCommandState() { - Command command = insertOne(); - - //insertOne - ProcessDefinition processDefinition = new ProcessDefinition(); - processDefinition.setName("def 1"); - processDefinition.setProjectId(1010); - processDefinition.setUserId(101); - processDefinition.setUpdateTime(new Date()); - processDefinition.setCreateTime(new Date()); - processDefinitionMapper.insert(processDefinition); - - command.setProcessDefinitionId(processDefinition.getId()); - commandMapper.updateById(command); - - - List commandCounts = commandMapper.countCommandState( - 4, null, null, new Integer[0] - ); - - Integer[] projectIdArray = new Integer[2]; - projectIdArray[0] = processDefinition.getProjectId(); - projectIdArray[1] = 200; - List commandCounts2 = commandMapper.countCommandState( - 4, null, null, projectIdArray - ); - - commandMapper.deleteById(command.getId()); - 
processDefinitionMapper.deleteById(processDefinition.getId()); - Assert.assertNotEquals(commandCounts.size(), 0); - Assert.assertNotEquals(commandCounts2.size(), 0); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/DataSourceMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/DataSourceMapperTest.java deleted file mode 100644 index 9ec9f6f513..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/DataSourceMapperTest.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.common.enums.DbType; -import cn.escheduler.dao.entity.DataSource; -import cn.escheduler.dao.entity.DatasourceUser; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class DataSourceMapperTest { - - @Autowired - DataSourceMapper dataSourceMapper; - - @Autowired - DataSourceUserMapper dataSourceUserMapper; - - private DataSource insertOne(){ - //insertOne - DataSource dataSource = new DataSource(); - dataSource.setUserId(4); - dataSource.setName("data source test"); - dataSource.setType(DbType.MYSQL); - dataSource.setNote("mysql test"); - dataSource.setConnectionParams("hello mysql"); - dataSource.setUpdateTime(new Date()); - dataSource.setCreateTime(new Date()); - dataSourceMapper.insert(dataSource); - return dataSource; - } - - @Test - public void testUpdate(){ - //insertOne - DataSource dataSource = insertOne(); - //update - dataSource.setUpdateTime(new Date()); - int update = dataSourceMapper.updateById(dataSource); - Assert.assertEquals(update, 1); - dataSourceMapper.deleteById(dataSource.getId()); - } - - @Test - public void testDelete(){ - - DataSource dataSource = insertOne(); - int delete = dataSourceMapper.deleteById(dataSource.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - DataSource dataSource = insertOne(); - //query - List dataSources = dataSourceMapper.selectList(null); - Assert.assertNotEquals(dataSources.size(), 0); - dataSourceMapper.deleteById(dataSource.getId()); - } - - @Test - public void 
testQueryDataSourceByType() { - DataSource dataSource = insertOne(); - //query - List dataSources = dataSourceMapper.queryDataSourceByType( - 0, DbType.MYSQL.ordinal() - ); - Assert.assertNotEquals(dataSources.size(), 0); - dataSourceMapper.deleteById(dataSource.getId()); - } - - @Test - public void testSelectPaging() { - DataSource dataSource = insertOne(); - Page page = new Page(1, 3); - IPage dataSourceIPage = dataSourceMapper.selectPaging(page, - 4, null); - Assert.assertNotEquals(dataSourceIPage.getTotal(), 0); - dataSourceMapper.deleteById(dataSource.getId()); - } - - @Test - public void testQueryDataSourceByName() { - DataSource dataSource = insertOne(); - List dataSources = dataSourceMapper.queryDataSourceByName("data source test"); - Assert.assertNotEquals(dataSources.size(), 0); - dataSourceMapper.deleteById(dataSource.getId()); - } - - @Test - public void testQueryAuthedDatasource() { - - DataSource dataSource = insertOne(); - DatasourceUser datasourceUser = new DatasourceUser(); - datasourceUser.setUserId(3); - datasourceUser.setDatasourceId(dataSource.getId()); - dataSourceUserMapper.insert(datasourceUser); - - List dataSources = dataSourceMapper.queryAuthedDatasource(3); - Assert.assertNotEquals(dataSources.size(), 0); - dataSourceMapper.deleteById(dataSource.getId()); - dataSourceUserMapper.deleteById(datasourceUser.getId()); - } - - @Test - public void testQueryDatasourceExceptUserId() { - DataSource dataSource = insertOne(); - List dataSources = dataSourceMapper.queryDatasourceExceptUserId(3); - Assert.assertNotEquals(dataSources.size(), 0); - dataSourceMapper.deleteById(dataSource.getId()); - } - - @Test - public void testListAllDataSourceByType() { - - DataSource dataSource = insertOne(); - - List dataSources = dataSourceMapper.queryDataSourceByType(4, DbType.MYSQL.ordinal()); - Assert.assertNotEquals(dataSources.size(), 0); - List dataSources2 = dataSourceMapper.queryDataSourceByType(10091, DbType.MYSQL.ordinal()); - 
Assert.assertEquals(dataSources2.size(), 0); - dataSourceMapper.deleteById(dataSource.getId()); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/DataSourceUserMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/DataSourceUserMapperTest.java deleted file mode 100644 index e18854b254..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/DataSourceUserMapperTest.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.dao.entity.DatasourceUser; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class DataSourceUserMapperTest { - - @Autowired - DataSourceUserMapper dataSourceUserMapper; - - - private DatasourceUser insertOne(){ - //insertOne - DatasourceUser dataSourceUser = new DatasourceUser(); - dataSourceUser.setUserId(4); - dataSourceUser.setDatasourceId(1010); - dataSourceUser.setPerm(7); - dataSourceUser.setUpdateTime(new Date()); - dataSourceUser.setCreateTime(new Date()); - return dataSourceUser; - } - - @Test - public void testUpdate(){ - //insertOne - DatasourceUser dataSourceUser = insertOne(); - //update - dataSourceUser.setUpdateTime(new Date()); - int update = dataSourceUserMapper.updateById(dataSourceUser); - Assert.assertEquals(update, 1); - dataSourceUserMapper.deleteById(dataSourceUser.getId()); - } - - @Test - public void testDelete(){ - - DatasourceUser dataSourceUser = insertOne(); - int delete = dataSourceUserMapper.deleteById(dataSourceUser.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - DatasourceUser dataSourceUser = insertOne(); - //query - List dataSources = dataSourceUserMapper.selectList(null); - Assert.assertNotEquals(dataSources.size(), 0); - dataSourceUserMapper.deleteById(dataSourceUser.getId()); - } - - @Test - public void testDeleteByUserId() { - DatasourceUser dataSourceUser = insertOne(); - int delete = dataSourceUserMapper.deleteByUserId(dataSourceUser.getUserId()); - Assert.assertNotEquals(delete, 0); - } - - @Test - public void testDeleteByDatasourceId() { - DatasourceUser dataSourceUser = insertOne(); - int 
delete = dataSourceUserMapper.deleteByDatasourceId(dataSourceUser.getDatasourceId()); - Assert.assertNotEquals(delete, 0); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ErrorCommandMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ErrorCommandMapperTest.java deleted file mode 100644 index 9bd9aba125..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ErrorCommandMapperTest.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.dao.entity.CommandCount; -import cn.escheduler.dao.entity.ErrorCommand; -import cn.escheduler.dao.entity.ProcessDefinition; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class ErrorCommandMapperTest { - - @Autowired - ErrorCommandMapper errorCommandMapper; - - @Autowired - ProcessDefinitionMapper processDefinitionMapper; - - - private ErrorCommand insertOne(){ - //insertOne - ErrorCommand errorCommand = new ErrorCommand(); - errorCommand.setId(10101); - errorCommand.setCommandType(CommandType.START_PROCESS); - errorCommand.setUpdateTime(new Date()); - errorCommand.setStartTime(new Date()); - errorCommandMapper.insert(errorCommand); - return errorCommand; - } - - @Test - public void testUpdate(){ - //insertOne - ErrorCommand errorCommand = insertOne(); - //update - errorCommand.setUpdateTime(new Date()); - int update = errorCommandMapper.updateById(errorCommand); - Assert.assertEquals(update, 1); - errorCommandMapper.deleteById(errorCommand.getId()); - } - - @Test - public void testDelete(){ - - ErrorCommand errorCommand = insertOne(); - int delete = errorCommandMapper.deleteById(errorCommand.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - errorCommandMapper.delete(null); - - ErrorCommand errorCommand = insertOne(); - - ProcessDefinition processDefinition = new ProcessDefinition(); - processDefinition.setName("def 1"); - processDefinition.setProjectId(1010); - processDefinition.setUserId(101); - processDefinition.setUpdateTime(new Date()); - processDefinition.setCreateTime(new Date()); - 
processDefinitionMapper.insert(processDefinition); - - errorCommand.setProcessDefinitionId(processDefinition.getId()); - errorCommandMapper.updateById(errorCommand); - - - List commandCounts = errorCommandMapper.countCommandState( - null, - null, - new Integer[0] - ); - - Integer[] projectIdArray = new Integer[2]; - projectIdArray[0] = processDefinition.getProjectId(); - projectIdArray[1] = 200; - List commandCounts2 = errorCommandMapper.countCommandState( - null, - null, - projectIdArray - ); - - errorCommandMapper.deleteById(errorCommand.getId()); - processDefinitionMapper.deleteById(processDefinition.getId()); - Assert.assertNotEquals(commandCounts.size(), 0); - Assert.assertNotEquals(commandCounts2.size(), 0); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessDefinitionMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessDefinitionMapperTest.java deleted file mode 100644 index 221adde707..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessDefinitionMapperTest.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.dao.entity.*; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.commons.lang3.StringUtils; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class ProcessDefinitionMapperTest { - - - @Autowired - ProcessDefinitionMapper processDefinitionMapper; - - @Autowired - UserMapper userMapper; - - @Autowired - QueueMapper queueMapper; - - @Autowired - TenantMapper tenantMapper; - - @Autowired - ProjectMapper projectMapper; - - private ProcessDefinition insertOne(){ - //insertOne - ProcessDefinition processDefinition = new ProcessDefinition(); - processDefinition.setName("def 1"); - processDefinition.setProjectId(1010); - processDefinition.setUserId(101); - processDefinition.setUpdateTime(new Date()); - processDefinition.setCreateTime(new Date()); - processDefinitionMapper.insert(processDefinition); - return processDefinition; - } - - @Test - public void testUpdate(){ - //insertOne - ProcessDefinition processDefinition = insertOne(); - //update - processDefinition.setUpdateTime(new Date()); - int update = processDefinitionMapper.updateById(processDefinition); - Assert.assertEquals(update, 1); - processDefinitionMapper.deleteById(processDefinition.getId()); - } - - @Test - public void testDelete(){ - ProcessDefinition processDefinition = insertOne(); - int delete = processDefinitionMapper.deleteById(processDefinition.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - 
ProcessDefinition processDefinition = insertOne(); - //query - List dataSources = processDefinitionMapper.selectList(null); - Assert.assertNotEquals(dataSources.size(), 0); - processDefinitionMapper.deleteById(processDefinition.getId()); - } - - @Test - public void testQueryByDefineName() { - Project project = new Project(); - project.setName("ut project"); - project.setUserId(4); - projectMapper.insert(project); - - Queue queue = new Queue(); - queue.setQueue("queue"); - queue.setQueueName("queue name"); - queueMapper.insert(queue); - - Tenant tenant = new Tenant(); - tenant.setTenantCode("tenant"); - tenant.setQueueId(queue.getId()); - tenant.setDescription("t"); - tenantMapper.insert(tenant); - - User user = new User(); - user.setUserName("hello"); - user.setUserPassword("pwd"); - user.setUserType(UserType.GENERAL_USER); - user.setTenantId(tenant.getId()); - userMapper.insert(user); - - //insertOne - ProcessDefinition processDefinition = new ProcessDefinition(); - processDefinition.setName("def 1"); - processDefinition.setProjectId(project.getId()); - processDefinition.setUpdateTime(new Date()); - processDefinition.setCreateTime(new Date()); - processDefinition.setTenantId(tenant.getId()); - processDefinition.setUserId(user.getId()); - processDefinitionMapper.insert(processDefinition); - - ProcessDefinition processDefinition1 = processDefinitionMapper.queryByDefineName(project.getId(), "def 1"); - Assert.assertNotEquals(processDefinition1, null); - processDefinitionMapper.deleteById(processDefinition.getId()); - queueMapper.deleteById(queue.getId()); - projectMapper.deleteById(project.getId()); - tenantMapper.deleteById(tenant.getId()); - userMapper.deleteById(user.getId()); - } - - @Test - public void testQueryDefineListPaging() { - ProcessDefinition processDefinition = insertOne(); - Page page = new Page(1,3); - IPage processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(page, "def", 101, 1010); - 
Assert.assertNotEquals(processDefinitionIPage.getTotal(), 0); - processDefinitionMapper.deleteById(processDefinition.getId()); - } - - @Test - public void testQueryAllDefinitionList() { - ProcessDefinition processDefinition = insertOne(); - List processDefinitionIPage = processDefinitionMapper.queryAllDefinitionList(1010); - Assert.assertNotEquals(processDefinitionIPage.size(), 0); - processDefinitionMapper.deleteById(processDefinition.getId()); - } - - @Test - public void testQueryDefinitionListByIdList() { - - ProcessDefinition processDefinition = insertOne(); - ProcessDefinition processDefinition1 = insertOne(); - - Integer[] array = new Integer[2]; - array[0] = processDefinition.getId(); - array[1] = processDefinition1.getId(); - - List processDefinitions = processDefinitionMapper.queryDefinitionListByIdList(array); - processDefinitionMapper.deleteById(processDefinition.getId()); - processDefinitionMapper.deleteById(processDefinition1.getId()); - Assert.assertEquals(processDefinitions.size(), 2); - - } - - @Test - public void testCountDefinitionGroupByUser() { - - User user= new User(); - user.setUserName("user1"); - user.setUserPassword("1"); - user.setEmail("xx@123.com"); - user.setUserType(UserType.GENERAL_USER); - user.setCreateTime(new Date()); - user.setTenantId(1); - user.setUpdateTime(new Date()); - userMapper.insert(user); - - ProcessDefinition processDefinition = insertOne(); - processDefinition.setUserId(user.getId()); - processDefinitionMapper.updateById(processDefinition); - - Integer[] projectIds = new Integer[1]; - projectIds[0] = processDefinition.getProjectId(); - List processDefinitions = processDefinitionMapper.countDefinitionGroupByUser( - processDefinition.getUserId(), - projectIds - ); - processDefinitionMapper.deleteById(processDefinition.getId()); - Assert.assertNotEquals(processDefinitions.size(), 0); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessInstanceMapMapperTest.java 
b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessInstanceMapMapperTest.java deleted file mode 100644 index cb3b5d0623..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessInstanceMapMapperTest.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.entity.ProcessInstanceMap; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class ProcessInstanceMapMapperTest { - - - @Autowired - ProcessInstanceMapMapper processInstanceMapMapper; - - - private ProcessInstanceMap insertOne(){ - //insertOne - ProcessInstanceMap processInstanceMap = new ProcessInstanceMap(); - processInstanceMap.setProcessInstanceId(0); - processInstanceMap.setParentTaskInstanceId(0); - processInstanceMap.setParentProcessInstanceId(0); - processInstanceMapMapper.insert(processInstanceMap); - return processInstanceMap; - } - - @Test - public void testUpdate(){ - //insertOne - ProcessInstanceMap processInstanceMap = insertOne(); - //update - processInstanceMap.setParentProcessInstanceId(1); - int update = processInstanceMapMapper.updateById(processInstanceMap); - Assert.assertEquals(update, 1); - processInstanceMapMapper.deleteById(processInstanceMap.getId()); - } - - @Test - public void testDelete(){ - ProcessInstanceMap processInstanceMap = insertOne(); - int delete = processInstanceMapMapper.deleteById(processInstanceMap.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - ProcessInstanceMap processInstanceMap = insertOne(); - //query - List dataSources = processInstanceMapMapper.selectList(null); - Assert.assertNotEquals(dataSources.size(), 0); - processInstanceMapMapper.deleteById(processInstanceMap.getId()); - } - - @Test - public void testQueryByParentId() { - ProcessInstanceMap processInstanceMap = insertOne(); - - 
processInstanceMap.setParentProcessInstanceId(100); - processInstanceMapMapper.updateById(processInstanceMap); - ProcessInstanceMap map = - processInstanceMapMapper.queryByParentId(processInstanceMap.getParentProcessInstanceId(), processInstanceMap.getParentTaskInstanceId()); - Assert.assertNotEquals(map, null); - - - processInstanceMapMapper.deleteById(processInstanceMap.getId()); - } - - @Test - public void testQueryBySubProcessId() { - ProcessInstanceMap processInstanceMap = insertOne(); - - processInstanceMap.setProcessInstanceId(100); - processInstanceMapMapper.updateById(processInstanceMap); - ProcessInstanceMap map = - processInstanceMapMapper.queryBySubProcessId( - processInstanceMap.getProcessInstanceId() ); - Assert.assertNotEquals(map, null); - - processInstanceMapMapper.deleteById(processInstanceMap.getId()); - } - - @Test - public void testDeleteByParentProcessId() { - ProcessInstanceMap processInstanceMap = insertOne(); - - processInstanceMap.setParentProcessInstanceId(100); - processInstanceMapMapper.updateById(processInstanceMap); - int delete = processInstanceMapMapper.deleteByParentProcessId( - processInstanceMap.getParentProcessInstanceId() - ); - Assert.assertEquals(delete, 1); - } - - @Test - public void querySubIdListByParentId() { - ProcessInstanceMap processInstanceMap = insertOne(); - processInstanceMap.setProcessInstanceId(1); - processInstanceMap.setParentProcessInstanceId(1010); - - processInstanceMapMapper.updateById(processInstanceMap); - - List subIds = processInstanceMapMapper.querySubIdListByParentId(processInstanceMap.getParentProcessInstanceId()); - - Assert.assertNotEquals(subIds.size(), 0); - - processInstanceMapMapper.deleteById(processInstanceMap.getId()); - - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessInstanceMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessInstanceMapperTest.java deleted file mode 100644 index 2c5da78d43..0000000000 
--- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProcessInstanceMapperTest.java +++ /dev/null @@ -1,287 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.enums.Flag; -import cn.escheduler.common.enums.ReleaseState; -import cn.escheduler.dao.entity.*; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mortbay.jetty.servlet.AbstractSessionIdManager; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class ProcessInstanceMapperTest { - - - @Autowired - ProcessInstanceMapper processInstanceMapper; - - @Autowired - ProcessDefinitionMapper processDefinitionMapper; - - @Autowired - ProjectMapper projectMapper; - - - private ProcessInstance insertOne(){ - //insertOne - ProcessInstance 
processInstance = new ProcessInstance(); - Date start = new Date(2019-1900, 1-1, 1, 0, 10,0); - Date end = new Date(2019-1900, 1-1, 1, 1, 0,0); - processInstance.setStartTime(start); - processInstance.setEndTime(end); - processInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); - - processInstanceMapper.insert(processInstance); - return processInstance; - } - - @Test - public void testUpdate(){ - //insertOne - ProcessInstance processInstanceMap = insertOne(); - //update - int update = processInstanceMapper.updateById(processInstanceMap); - Assert.assertEquals(update, 1); - processInstanceMapper.deleteById(processInstanceMap.getId()); - } - - @Test - public void testDelete(){ - ProcessInstance processInstanceMap = insertOne(); - int delete = processInstanceMapper.deleteById(processInstanceMap.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - ProcessInstance processInstance = insertOne(); - //query - List dataSources = processInstanceMapper.selectList(null); - Assert.assertNotEquals(dataSources.size(), 0); - processInstanceMapper.deleteById(processInstance.getId()); - } - - @Test - public void testQueryDetailById() { - ProcessInstance processInstance = insertOne(); - processInstanceMapper.updateById(processInstance); - - ProcessInstance processInstance1 = processInstanceMapper.queryDetailById(processInstance.getId()); - Assert.assertNotEquals(processInstance1, 50); - processInstanceMapper.deleteById(processInstance.getId()); - } - - @Test - public void testQueryByHostAndStatus() { - ProcessInstance processInstance = insertOne(); - processInstance.setHost("192.168.2.155"); - processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); - processInstanceMapper.updateById(processInstance); - - int[] stateArray = new int[]{ - ExecutionStatus.RUNNING_EXEUTION.ordinal(), - ExecutionStatus.SUCCESS.ordinal()}; - - processInstanceMapper.queryByHostAndStatus(processInstance.getHost(), stateArray); - - 
processInstanceMapper.deleteById(processInstance.getId()); - } - - @Test - public void testQueryProcessInstanceListPaging() { - - - int[] stateArray = new int[]{ - ExecutionStatus.RUNNING_EXEUTION.ordinal(), - ExecutionStatus.SUCCESS.ordinal()}; - - ProcessDefinition processDefinition = new ProcessDefinition(); - processDefinition.setProjectId(1010); - processDefinition.setReleaseState(ReleaseState.ONLINE); - processDefinitionMapper.insert(processDefinition); - - ProcessInstance processInstance = insertOne(); - processInstance.setProcessDefinitionId(processDefinition.getId()); - processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); - processInstance.setIsSubProcess(Flag.NO); - processInstance.setStartTime(new Date()); - - processInstanceMapper.updateById(processInstance); - - - Page page = new Page(1, 3); - - IPage processInstanceIPage = processInstanceMapper.queryProcessInstanceListPaging( - page, - processDefinition.getProjectId(), - processInstance.getProcessDefinitionId(), - processInstance.getName(), - stateArray, - processInstance.getHost(), - null, - null - ); - Assert.assertNotEquals(processInstanceIPage.getTotal(), 0); - - processDefinitionMapper.deleteById(processDefinition.getId()); - processInstanceMapper.deleteById(processInstance.getId()); - } - - @Test - public void testSetFailoverByHostAndStateArray() { - - int[] stateArray = new int[]{ - ExecutionStatus.RUNNING_EXEUTION.ordinal(), - ExecutionStatus.SUCCESS.ordinal()}; - - ProcessInstance processInstance = insertOne(); - - processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); - processInstance.setHost("192.168.2.220"); - processInstanceMapper.updateById(processInstance); - String host = processInstance.getHost(); - int update = processInstanceMapper.setFailoverByHostAndStateArray(host, stateArray); - Assert.assertNotEquals(update, 0); - - processInstance = processInstanceMapper.selectById(processInstance.getId()); - Assert.assertEquals(processInstance.getHost(), null); - 
processInstanceMapper.deleteById(processInstance.getId()); - } - - @Test - public void testUpdateProcessInstanceByState() { - - - ProcessInstance processInstance = insertOne(); - - processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); - processInstanceMapper.updateById(processInstance); - processInstanceMapper.updateProcessInstanceByState(ExecutionStatus.RUNNING_EXEUTION, ExecutionStatus.SUCCESS); - - ProcessInstance processInstance1 = processInstanceMapper.selectById(processInstance.getId()); - - processInstanceMapper.deleteById(processInstance.getId()); - Assert.assertEquals(processInstance1.getState(), ExecutionStatus.SUCCESS); - - } - - @Test - public void testCountInstanceStateByUser() { - - processDefinitionMapper.delete(null); - processInstanceMapper.delete(null); - - Project project = new Project(); - project.setName("testProject"); - projectMapper.insert(project); - - ProcessDefinition processDefinition = new ProcessDefinition(); - processDefinition.setProjectId(project.getId()); - - processDefinitionMapper.insert(processDefinition); - ProcessInstance processInstance = insertOne(); - processInstance.setProcessDefinitionId(processDefinition.getId()); - int update = processInstanceMapper.updateById(processInstance); - - Integer[] projectIds = new Integer[]{processDefinition.getProjectId()}; - - List executeStatusCounts = processInstanceMapper.countInstanceStateByUser(null, null, projectIds); - - - Assert.assertNotEquals(executeStatusCounts.size(), 0); - - projectMapper.deleteById(project.getId()); - processDefinitionMapper.deleteById(processDefinition.getId()); - processInstanceMapper.deleteById(processInstance.getId()); - } - - @Test - public void testQueryByProcessDefineId() { - ProcessInstance processInstance = insertOne(); - ProcessInstance processInstance1 = insertOne(); - - - List processInstances = processInstanceMapper.queryByProcessDefineId(processInstance.getProcessDefinitionId(), 1); - Assert.assertEquals(processInstances.size(), 1); - - 
processInstances = processInstanceMapper.queryByProcessDefineId(processInstance.getProcessDefinitionId(), 2); - Assert.assertEquals(processInstances.size(), 2); - - processInstanceMapper.deleteById(processInstance.getId()); - processInstanceMapper.deleteById(processInstance1.getId()); - } - - @Test - public void testQueryLastSchedulerProcess() { - ProcessInstance processInstance = insertOne(); - processInstance.setScheduleTime(new Date()); - processInstanceMapper.updateById(processInstance); - - ProcessInstance processInstance1 = processInstanceMapper.queryLastSchedulerProcess(processInstance.getProcessDefinitionId(), null, null ); - Assert.assertNotEquals(processInstance1, null); - processInstanceMapper.deleteById(processInstance.getId()); - } - - @Test - public void testQueryLastRunningProcess() { - ProcessInstance processInstance = insertOne(); - processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); - processInstanceMapper.updateById(processInstance); - - int[] stateArray = new int[]{ - ExecutionStatus.RUNNING_EXEUTION.ordinal(), - ExecutionStatus.SUBMITTED_SUCCESS.ordinal()}; - - ProcessInstance processInstance1 = processInstanceMapper.queryLastRunningProcess(processInstance.getProcessDefinitionId(), null, null , stateArray); - - Assert.assertNotEquals(processInstance1, null); - processInstanceMapper.deleteById(processInstance.getId()); - } - - @Test - public void testQueryLastManualProcess() { - ProcessInstance processInstance = insertOne(); - processInstanceMapper.updateById(processInstance); - - Date start = new Date(2019-1900, 1-1, 01, 0, 0, 0); - Date end = new Date(2019-1900, 1-1, 01, 5, 0, 0); - ProcessInstance processInstance1 = processInstanceMapper.queryLastManualProcess(processInstance.getProcessDefinitionId(),start, end - ); - Assert.assertEquals(processInstance1.getId(), processInstance.getId()); - - start = new Date(2019-1900, 1-1, 01, 1, 0, 0); - processInstance1 = 
processInstanceMapper.queryLastManualProcess(processInstance.getProcessDefinitionId(),start, end - ); - Assert.assertEquals(processInstance1, null); - - processInstanceMapper.deleteById(processInstance.getId()); - - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProjectMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProjectMapperTest.java deleted file mode 100644 index cf23e8101c..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProjectMapperTest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class ProjectMapperTest { - - @Test - public void testQueryDetailById() { - } - - @Test - public void testQueryProjectByName() { - } - - @Test - public void testQueryProjectListPaging() { - } - - @Test - public void testQueryAllProjectListPaging() { - } - - @Test - public void testQueryProjectCreatedByUser() { - } - - @Test - public void testQueryAuthedProjectListByUserId() { - } - - @Test - public void testQueryProjectExceptUserId() { - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProjectUserMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProjectUserMapperTest.java deleted file mode 100644 index a1b201c7e1..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ProjectUserMapperTest.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.dao.entity.ProjectUser; -import cn.escheduler.dao.entity.ProjectUser; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class ProjectUserMapperTest { - - - @Autowired - ProjectUserMapper projectUserMapper; - - private ProjectUser insertOne(){ - //insertOne - ProjectUser projectUser = new ProjectUser(); - projectUser.setProjectId(1010); - projectUser.setUserId(111); - projectUserMapper.insert(projectUser); - return projectUser; - } - - @Test - public void testUpdate(){ - //insertOne - ProjectUser projectUser = insertOne(); - projectUser.setCreateTime(new Date()); - //update - int update = projectUserMapper.updateById(projectUser); - Assert.assertEquals(update, 1); - projectUserMapper.deleteById(projectUser.getId()); - } - - @Test - public void testDelete(){ - ProjectUser projectUserMap = insertOne(); - int delete = projectUserMapper.deleteById(projectUserMap.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - ProjectUser projectUser = insertOne(); - //query - List projectUsers = projectUserMapper.selectList(null); - Assert.assertNotEquals(projectUsers.size(), 0); - projectUserMapper.deleteById(projectUser.getId()); - } - - @Test - public void testDeleteProjectRelation() { - - - ProjectUser projectUser = insertOne(); - int delete = projectUserMapper.deleteProjectRelation(projectUser.getProjectId(), projectUser.getUserId()); - Assert.assertEquals(delete, 1); - - } - - @Test - public void testQueryProjectRelation() { - ProjectUser projectUser = insertOne(); - ProjectUser projectUser1 = 
projectUserMapper.queryProjectRelation(projectUser.getProjectId(), projectUser.getUserId()); - Assert.assertNotEquals(projectUser1, null); - - projectUserMapper.deleteById(projectUser.getId()); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/QueueMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/QueueMapperTest.java deleted file mode 100644 index 9ab0e1ce43..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/QueueMapperTest.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.dao.entity.Queue; -import cn.escheduler.dao.entity.Queue; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class QueueMapperTest { - - - @Autowired - QueueMapper queueMapper; - - - private Queue insertOne(){ - //insertOne - Queue queue = new Queue(); - queue.setQueueName("queue"); - queue.setQueue("queue"); - queue.setCreateTime(new Date()); - queue.setUpdateTime(new Date()); - queueMapper.insert(queue); - return queue; - } - - @Test - public void testUpdate(){ - //insertOne - Queue queue = insertOne(); - queue.setCreateTime(new Date()); - //update - int update = queueMapper.updateById(queue); - Assert.assertEquals(update, 1); - queueMapper.deleteById(queue.getId()); - } - - @Test - public void testDelete(){ - Queue queue = insertOne(); - int delete = queueMapper.deleteById(queue.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - Queue queue = insertOne(); - //query - List queues = queueMapper.selectList(null); - Assert.assertNotEquals(queues.size(), 0); - queueMapper.deleteById(queue.getId()); - } - - @Test - public void testQueryQueuePaging() { - - Queue queue = insertOne(); - Page page = new Page(1,3); - - IPage queueIPage= queueMapper.queryQueuePaging(page, - null); - Assert.assertNotEquals(queueIPage.getTotal(), 0); - - queueIPage= queueMapper.queryQueuePaging(page, - queue.getQueueName()); - Assert.assertNotEquals(queueIPage.getTotal(), 0); - queueMapper.deleteById(queue.getId()); - } - - @Test - public void 
queryAllQueueList() { - Queue queue = insertOne(); - - List queues = queueMapper.queryAllQueueList(queue.getQueue(), null); - Assert.assertNotEquals(queues.size(), 0); - - queues = queueMapper.queryAllQueueList(null, queue.getQueueName()); - Assert.assertNotEquals(queues.size(), 0); - queueMapper.deleteById(queue.getId()); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ResourceMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ResourceMapperTest.java deleted file mode 100644 index 34c0e26918..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ResourceMapperTest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class ResourceMapperTest { - - @Test - public void testQueryResourceList() { - } - - @Test - public void testQueryResourcePaging() { - } - - @Test - public void testQueryResourceListAuthored() { - } - - @Test - public void testQueryAuthorizedResourceList() { - } - - @Test - public void testQueryResourceExceptUserId() { - } - - @Test - public void testQueryTenantCodeByResourceName() { - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ResourceUserMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ResourceUserMapperTest.java deleted file mode 100644 index a492a86913..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ResourceUserMapperTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.dao.entity.ResourcesUser; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class ResourceUserMapperTest { - - - - @Autowired - ResourceUserMapper resourceUserMapper; - - private ResourcesUser insertOne(){ - //insertOne - ResourcesUser queue = new ResourcesUser(); - queue.setCreateTime(new Date()); - queue.setUpdateTime(new Date()); - queue.setUserId(11111); - queue.setResourcesId(1110); - resourceUserMapper.insert(queue); - return queue; - } - - @Test - public void testUpdate(){ - //insertOne - ResourcesUser queue = insertOne(); - queue.setCreateTime(new Date()); - //update - int update = resourceUserMapper.updateById(queue); - Assert.assertEquals(update, 1); - resourceUserMapper.deleteById(queue.getId()); - } - - @Test - public void testDelete(){ - ResourcesUser queue = insertOne(); - int delete = resourceUserMapper.deleteById(queue.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - ResourcesUser queue = insertOne(); - //query - List queues = resourceUserMapper.selectList(null); - Assert.assertNotEquals(queues.size(), 0); - resourceUserMapper.deleteById(queue.getId()); - } - - @Test - public void testDeleteResourceUser() { - - ResourcesUser queue = insertOne(); - int delete = resourceUserMapper.deleteResourceUser( - queue.getUserId(), - queue.getResourcesId()); - Assert.assertNotEquals(delete, 0); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ScheduleMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ScheduleMapperTest.java deleted file mode 100644 index 
8483b6bd62..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/ScheduleMapperTest.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.common.enums.FailureStrategy; -import cn.escheduler.common.enums.ReleaseState; -import cn.escheduler.common.enums.WarningType; -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.entity.Project; -import cn.escheduler.dao.entity.Schedule; -import cn.escheduler.dao.entity.User; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class ScheduleMapperTest { - - - @Autowired - ScheduleMapper scheduleMapper; - - @Autowired - UserMapper userMapper; - - @Autowired - ProjectMapper projectMapper; - - @Autowired - 
ProcessDefinitionMapper processDefinitionMapper; - - private Schedule insertOne(){ - //insertOne - Schedule schedule = new Schedule(); - schedule.setStartTime(new Date()); - schedule.setEndTime(new Date()); - schedule.setCrontab(""); - schedule.setFailureStrategy(FailureStrategy.CONTINUE); - schedule.setReleaseState(ReleaseState.OFFLINE); - schedule.setWarningType(WarningType.NONE); - schedule.setCreateTime(new Date()); - schedule.setUpdateTime(new Date()); - scheduleMapper.insert(schedule); - return schedule; - } - - @Test - public void testUpdate(){ - //insertOne - Schedule schedule = insertOne(); - schedule.setCreateTime(new Date()); - //update - int update = scheduleMapper.updateById(schedule); - Assert.assertEquals(update, 1); - scheduleMapper.deleteById(schedule.getId()); - } - - @Test - public void testDelete(){ - Schedule schedule = insertOne(); - int delete = scheduleMapper.deleteById(schedule.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - Schedule schedule = insertOne(); - //query - List schedules = scheduleMapper.selectList(null); - Assert.assertNotEquals(schedules.size(), 0); - scheduleMapper.deleteById(schedule.getId()); - } - - @Test - public void testQueryByProcessDefineIdPaging() { - - User user = new User(); - user.setUserName("ut name"); - userMapper.insert(user); - - Project project = new Project(); - project.setName("ut project"); - project.setUserId(user.getId()); - projectMapper.insert(project); - - ProcessDefinition processDefinition = new ProcessDefinition(); - processDefinition.setProjectId(project.getId()); - processDefinition.setUserId(user.getId()); - processDefinition.setLocations(""); - processDefinitionMapper.insert(processDefinition); - - Schedule schedule= insertOne(); - schedule.setUserId(user.getId()); - schedule.setProcessDefinitionId(processDefinition.getId()); - scheduleMapper.insert(schedule); - - Page page = new Page(1,3); - IPage scheduleIPage = 
scheduleMapper.queryByProcessDefineIdPaging(page, - processDefinition.getId(), "" - ); - Assert.assertNotEquals(scheduleIPage.getSize(), 0); - - - projectMapper.deleteById(project.getId()); - processDefinitionMapper.deleteById(processDefinition.getId()); - userMapper.deleteById(user.getId()); - scheduleMapper.deleteById(schedule.getId()); - } - - @Test - public void testQuerySchedulerListByProjectName() { - - - User user = new User(); - user.setUserName("ut name"); - userMapper.insert(user); - - Project project = new Project(); - project.setName("ut project"); - project.setUserId(user.getId()); - projectMapper.insert(project); - - ProcessDefinition processDefinition = new ProcessDefinition(); - processDefinition.setProjectId(project.getId()); - processDefinition.setUserId(user.getId()); - processDefinition.setLocations(""); - processDefinitionMapper.insert(processDefinition); - - Schedule schedule= insertOne(); - schedule.setUserId(user.getId()); - schedule.setProcessDefinitionId(processDefinition.getId()); - scheduleMapper.insert(schedule); - - Page page = new Page(1,3); - List schedules = scheduleMapper.querySchedulerListByProjectName( - project.getName() - ); - projectMapper.deleteById(project.getId()); - processDefinitionMapper.deleteById(processDefinition.getId()); - userMapper.deleteById(user.getId()); - scheduleMapper.deleteById(schedule.getId()); - - Assert.assertNotEquals(schedules.size(), 0); - } - - @Test - public void testSelectAllByProcessDefineArray() { - - Schedule schedule = insertOne(); - schedule.setProcessDefinitionId(12345); - schedule.setReleaseState(ReleaseState.ONLINE); - scheduleMapper.updateById(schedule); - - List schedules= scheduleMapper.selectAllByProcessDefineArray(new int[] {schedule.getProcessDefinitionId()}); - scheduleMapper.deleteById(schedule.getId()); - Assert.assertNotEquals(schedules.size(), 0); - } - - @Test - public void queryByProcessDefinitionId() { - Schedule schedule = insertOne(); - 
schedule.setProcessDefinitionId(12345); - scheduleMapper.updateById(schedule); - - List schedules= scheduleMapper.queryByProcessDefinitionId(schedule.getProcessDefinitionId()); - scheduleMapper.deleteById(schedule.getId()); - Assert.assertNotEquals(schedules.size(), 0); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/SessionMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/SessionMapperTest.java deleted file mode 100644 index aa81544030..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/SessionMapperTest.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.dao.entity.Session; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class SessionMapperTest { - - @Autowired - SessionMapper sessionMapper; - - private Session insertOne(){ - //insertOne - Session session = new Session(); - session.setLastLoginTime(new Date()); - session.setUserId(11111); - sessionMapper.insert(session); - return session; - } - - @Test - public void testUpdate(){ - //insertOne - Session session = insertOne(); - session.setLastLoginTime(new Date()); - //update - int update = sessionMapper.updateById(session); - Assert.assertEquals(update, 1); - sessionMapper.deleteById(session.getId()); - } - - @Test - public void testDelete(){ - Session session = insertOne(); - int delete = sessionMapper.deleteById(session.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - Session session = insertOne(); - //query - List sessions = sessionMapper.selectList(null); - Assert.assertNotEquals(sessions.size(), 0); - sessionMapper.deleteById(session.getId()); - } - - @Test - public void testQueryByUserId() { - Session session = insertOne(); - List sessions = sessionMapper.queryByUserId(session.getUserId()); - Assert.assertNotEquals(sessions.size(), 0); - - sessionMapper.deleteById(session.getId()); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/TaskInstanceMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/TaskInstanceMapperTest.java deleted file mode 100644 index b227c1194e..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/TaskInstanceMapperTest.java 
+++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class TaskInstanceMapperTest { - - @Test - public void testQueryTaskByProcessIdAndState() { - } - - @Test - public void testQueryById() { - } - - @Test - public void testFindValidTaskListByProcessId() { - } - - @Test - public void testQueryByHostAndStatus() { - } - - @Test - public void testSetFailoverByHostAndStateArray() { - } - - @Test - public void testQueryByInstanceIdAndName() { - } - - @Test - public void testCountTask() { - } - - @Test - public void testCountTaskInstanceStateByUser() { - } - - @Test - public void testQueryTaskInstanceListPaging() { - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/TenantMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/TenantMapperTest.java deleted file mode 100644 index 56c1b2b57e..0000000000 --- 
a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/TenantMapperTest.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.dao.entity.Queue; -import cn.escheduler.dao.entity.Tenant; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class TenantMapperTest { - - @Autowired - TenantMapper tenantMapper; - - @Autowired - QueueMapper queueMapper; - - private Tenant insertOne(){ - //insertOne - Tenant tenant = new Tenant(); - tenant.setCreateTime(new Date()); - tenant.setUpdateTime(new Date()); - tenantMapper.insert(tenant); - return tenant; - } - - @Test - public void testUpdate(){ - //insertOne - Tenant tenant = insertOne(); - tenant.setUpdateTime(new Date()); - //update - int update 
= tenantMapper.updateById(tenant); - Assert.assertEquals(update, 1); - tenantMapper.deleteById(tenant.getId()); - } - - @Test - public void testDelete(){ - Tenant tenant = insertOne(); - int delete = tenantMapper.deleteById(tenant.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - Tenant tenant = insertOne(); - //query - List tenants = tenantMapper.selectList(null); - Assert.assertNotEquals(tenants.size(), 0); - tenantMapper.deleteById(tenant.getId()); - } - - @Test - public void testQueryById() { - - Queue queue = new Queue(); - queue.setQueueName("ut queue name"); - queue.setQueue("ut queue"); - queueMapper.insert(queue); - - - Tenant tenant = insertOne(); - tenant.setQueueId(queue.getId()); - tenantMapper.updateById(tenant); - - Tenant tenant1 = tenantMapper.queryById(tenant.getId()); - - tenantMapper.deleteById(tenant.getId()); - Assert.assertNotEquals(tenant1, null); - } - - @Test - public void testQueryByTenantCode() { - - Tenant tenant = insertOne(); - tenant.setTenantCode("ut code"); - tenantMapper.updateById(tenant); - - List tenant1 = tenantMapper.queryByTenantCode(tenant.getTenantCode()); - - tenantMapper.deleteById(tenant.getId()); - Assert.assertNotEquals(tenant1.size(), 0); - } - - @Test - public void testQueryTenantPaging() { - Tenant tenant = insertOne(); - tenant.setTenantCode("ut code"); - tenant.setTenantName("ut name"); - tenantMapper.updateById(tenant); - Page page = new Page(1,3); - - IPage tenantIPage = tenantMapper.queryTenantPaging(page, tenant.getTenantName()); - - tenantMapper.deleteById(tenant.getId()); - Assert.assertNotEquals(tenantIPage.getTotal(), 0); - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UDFUserMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UDFUserMapperTest.java deleted file mode 100644 index a49a5913f3..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UDFUserMapperTest.java +++ 
/dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class UDFUserMapperTest { - - @Test - public void testDeleteByUserId() { - } - - @Test - public void testDeleteByUdfFuncId() { - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UdfFuncMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UdfFuncMapperTest.java deleted file mode 100644 index 795c899c42..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UdfFuncMapperTest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class UdfFuncMapperTest { - - @Test - public void testQueryUdfByIdStr() { - } - - @Test - public void testQueryUdfFuncPaging() { - } - - @Test - public void testGetUdfFuncByType() { - } - - @Test - public void testQueryUdfFuncExceptUserId() { - } - - @Test - public void testQueryAuthedUdfFunc() { - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UserAlertGroupMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UserAlertGroupMapperTest.java deleted file mode 100644 index 3a11a432b4..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UserAlertGroupMapperTest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class UserAlertGroupMapperTest { - - @Test - public void testQueryForUser() { - } - - @Test - public void testDeleteByAlertgroupId() { - } - - @Test - public void testListUserByAlertgroupId() { - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UserMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UserMapperTest.java deleted file mode 100644 index 92721682a8..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/UserMapperTest.java +++ /dev/null @@ -1,326 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.dao.mapper; - -import cn.escheduler.common.enums.AlertType; -import cn.escheduler.common.enums.UserType; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.dao.entity.*; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class UserMapperTest { - @Autowired - private UserMapper userMapper; - - @Autowired - AlertGroupMapper alertGroupMapper; - - @Autowired - private UserAlertGroupMapper userAlertGroupMapper; - - @Autowired - AccessTokenMapper accessTokenMapper; - - @Autowired - TenantMapper tenantMapper; - - @Autowired - QueueMapper queueMapper; - - /** - * insert one user - * @return - */ - private User insertOne(){ - User user = new User(); - user.setUserName("user1"); - user.setUserPassword("1"); - user.setEmail("xx@123.com"); - user.setUserType(UserType.GENERAL_USER); - user.setCreateTime(new Date()); - user.setTenantId(1); - user.setUpdateTime(new Date()); - userMapper.insert(user); - return user; - } - - /** - * insert one user - * @param tenant - * @return - */ - private User insertOne(Tenant tenant){ - User user = new User(); - user.setUserName("user1"); - user.setUserPassword("1"); - user.setEmail("xx@123.com"); - user.setUserType(UserType.GENERAL_USER); - user.setCreateTime(new Date()); - user.setTenantId(tenant.getId()); - user.setUpdateTime(new Date()); - userMapper.insert(user); - return user; - } - - /** - * insert one user - * @param queue - * @param tenant - * 
@return - */ - private User insertOne(Queue queue,Tenant tenant){ - User user = new User(); - user.setUserName("user1"); - user.setUserPassword("1"); - user.setEmail("xx@123.com"); - user.setUserType(UserType.GENERAL_USER); - user.setCreateTime(new Date()); - user.setTenantId(tenant.getId()); - user.setQueue(queue.getQueueName()); - user.setUpdateTime(new Date()); - userMapper.insert(user); - return user; - } - - /** - * insert one AlertGroup - * @return - */ - private AlertGroup insertOneAlertGroup(){ - //insertOne - AlertGroup alertGroup = new AlertGroup(); - alertGroup.setGroupName("alert group 1"); - alertGroup.setDescription("alert test1"); - alertGroup.setGroupType(AlertType.EMAIL); - - alertGroup.setCreateTime(new Date()); - alertGroup.setUpdateTime(new Date()); - alertGroupMapper.insert(alertGroup); - return alertGroup; - } - - /** - * insert one UserAlertGroup - * @param user - * @param alertGroup - * @return - */ - private UserAlertGroup insertOneUserAlertGroup(User user,AlertGroup alertGroup){ - UserAlertGroup userAlertGroup = new UserAlertGroup(); - userAlertGroup.setAlertgroupName(alertGroup.getGroupName()); - userAlertGroup.setAlertgroupId(alertGroup.getId()); - userAlertGroup.setUserId(user.getId()); - userAlertGroup.setCreateTime(new Date()); - userAlertGroup.setUpdateTime(new Date()); - userAlertGroupMapper.insert(userAlertGroup); - return userAlertGroup; - } - - /** - * insert one AccessToken - * @param user - * @return - */ - private AccessToken insertOneAccessToken(User user){ - //insertOne - AccessToken accessToken = new AccessToken(); - accessToken.setUserId(user.getId()); - accessToken.setToken("secrettoken"); - accessToken.setCreateTime(new Date()); - accessToken.setUpdateTime(new Date()); - accessToken.setExpireTime(DateUtils.getSomeHourOfDay(new Date(),-1)); - accessTokenMapper.insert(accessToken); - return accessToken; - } - - /** - * insert one Tenant - * @return - */ - private Tenant insertOneTenant(){ - Tenant tenant = new Tenant(); - 
tenant.setTenantCode("dolphin"); - tenant.setTenantName("dolphin test"); - tenant.setDescription("dolphin user use"); - tenant.setQueue("1"); - tenant.setCreateTime(new Date()); - tenant.setUpdateTime(new Date()); - tenantMapper.insert(tenant); - return tenant; - } - - /** - * insert one Queue - * @return - */ - private Queue insertOneQueue(){ - Queue queue = new Queue(); - queue.setQueue("dolphin"); - queue.setQueueName("dolphin queue"); - queue.setCreateTime(new Date()); - queue.setUpdateTime(new Date()); - queueMapper.insert(queue); - return queue; - } - - @Test - public void testUpdate(){ - //insertOne - User user = insertOne(); - //update - user.setEmail("xx-update@126.com"); - user.setUserName("user1_update"); - user.setUserType(UserType.ADMIN_USER); - int update = userMapper.updateById(user); - Assert.assertEquals(update, 1); - userMapper.deleteById(user.getId()); - } - - @Test - public void testDelete(){ - //insertOne - User user = insertOne(); - //delete - int delete = userMapper.deleteById(user.getId()); - Assert.assertEquals(delete, 1); - userMapper.deleteById(user.getId()); - } - - @Test - public void testQuery() { - //insertOne - User user = insertOne(); - //query - List userList = userMapper.selectList(null); - Assert.assertNotEquals(userList.size(), 0); - userMapper.deleteById(user.getId()); - } - - @Test - public void testQueryAllGeneralUser() { - //insertOne - User user = insertOne(); - //queryAllGeneralUser - List userList = userMapper.queryAllGeneralUser(); - Assert.assertNotEquals(userList.size(), 0); - userMapper.deleteById(user.getId()); - } - - @Test - public void testQueryByUserNameAccurately() { - //insertOne - User user = insertOne(); - //queryByUserNameAccurately - User queryUser = userMapper.queryByUserNameAccurately(user.getUserName()); - Assert.assertEquals(queryUser.getUserName(), user.getUserName()); - userMapper.deleteById(user.getId()); - } - - @Test - public void testQueryUserByNamePassword() { - //insertOne - User user = 
insertOne(); - //queryUserByNamePassword - User queryUser = userMapper.queryUserByNamePassword(user.getUserName(),user.getUserPassword()); - Assert.assertEquals(queryUser.getUserName(),user.getUserName()); - Assert.assertEquals(queryUser.getUserPassword(),user.getUserPassword()); - userMapper.deleteById(user.getId()); - } - - @Test - public void testQueryUserPaging() { - //insertOneQueue - Queue queue = insertOneQueue(); - //insertOneTenant - Tenant tenant = insertOneTenant(); - //insertOne - User user = insertOne(queue,tenant); - //queryUserPaging - Page page = new Page(1,3); - IPage userIPage = userMapper.queryUserPaging(page, user.getUserName()); - Assert.assertNotEquals(userIPage.getTotal(), 0); - queueMapper.deleteById(queue.getId()); - tenantMapper.deleteById(tenant.getId()); - userMapper.deleteById(user.getId()); - } - - @Test - public void testQueryDetailsById() { - //insertOne - User user = insertOne(); - //queryDetailsById - User queryUser = userMapper.queryDetailsById(user.getId()); - Assert.assertEquals(queryUser,user); - userMapper.deleteById(user.getId()); - } - - @Test - public void testQueryUserListByAlertGroupId() { - //insertOne - User user = insertOne(); - //insertOneAlertGroup - AlertGroup alertGroup = insertOneAlertGroup(); - //insertOneUserAlertGroup - UserAlertGroup userAlertGroup = insertOneUserAlertGroup(user, alertGroup); - //queryUserListByAlertGroupId - List userList = userMapper.queryUserListByAlertGroupId(userAlertGroup.getAlertgroupId()); - Assert.assertNotEquals(userList.size(), 0); - userMapper.deleteById(user.getId()); - alertGroupMapper.deleteById(alertGroup.getId()); - userAlertGroupMapper.deleteById(userAlertGroup.getAlertgroupId()); - - } - - @Test - public void testQueryTenantCodeByUserId() { - //insertOneTenant - Tenant tenant = insertOneTenant(); - //insertOne - User user = insertOne(tenant); - //queryTenantCodeByUserId - User queryUser = userMapper.queryTenantCodeByUserId(user.getId()); - 
Assert.assertEquals(queryUser,user); - userMapper.deleteById(user.getId()); - tenantMapper.deleteById(tenant.getId()); - } - - @Test - public void testQueryUserByToken() { - //insertOne - User user = insertOne(); - //insertOneAccessToken - AccessToken accessToken = insertOneAccessToken(user); - //queryUserByToken - User userToken = userMapper.queryUserByToken(accessToken.getToken()); - Assert.assertEquals(userToken,user); - userMapper.deleteById(user.getId()); - accessTokenMapper.deleteById(accessToken.getId()); - - } -} \ No newline at end of file diff --git a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/WorkerGroupMapperTest.java b/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/WorkerGroupMapperTest.java deleted file mode 100644 index ff215e625b..0000000000 --- a/escheduler-dao/src/test/java/cn/escheduler/dao/mapper/WorkerGroupMapperTest.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.dao.mapper; - - -import cn.escheduler.dao.entity.WorkerGroup; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class WorkerGroupMapperTest { - @Autowired - WorkerGroupMapper workerGroupMapper; - - private WorkerGroup insertOne(){ - //insertOne - WorkerGroup workerGroup = new WorkerGroup(); - - String name = "workerGroup3"; - workerGroup.setName(name); - workerGroup.setIpList("192.168.220.154,192.168.220.188"); - workerGroup.setCreateTime(new Date()); - workerGroup.setUpdateTime(new Date()); - workerGroupMapper.insert(workerGroup); - return workerGroup; - } - - - @Test - public void testUpdate(){ - //insertOne - WorkerGroup workerGroup = insertOne(); - //update - workerGroup.setName("workerGroup11"); - int update = workerGroupMapper.updateById(workerGroup); - Assert.assertEquals(update, 1); - workerGroupMapper.deleteById(workerGroup.getId()); - } - - @Test - public void testDelete(){ - //insertOne - WorkerGroup workerGroup = insertOne(); - //delete - int delete = workerGroupMapper.deleteById(workerGroup.getId()); - Assert.assertEquals(delete, 1); - } - - @Test - public void testQuery() { - //insertOne - WorkerGroup workerGroup = insertOne(); - //query - List workerGroupList = workerGroupMapper.selectList(null); - Assert.assertNotEquals(workerGroupList.size(), 0); - workerGroupMapper.deleteById(workerGroup.getId()); - } - - @Test - public void testQueryAllWorkerGroup() { - //insertOne - WorkerGroup workerGroup = insertOne(); - //queryAllWorkerGroup - List workerGroupList = 
workerGroupMapper.queryAllWorkerGroup(); - Assert.assertNotEquals(workerGroupList.size(), 0); - workerGroupMapper.deleteById(workerGroup.getId()); - } - - @Test - public void testQueryWorkerGroupByName() { - //insertOne - WorkerGroup workerGroup = insertOne(); - //queryWorkerGroupByName - List workerGroupList = workerGroupMapper.queryWorkerGroupByName(workerGroup.getName()); - Assert.assertNotEquals(workerGroupList.size(), 0); - workerGroupMapper.deleteById(workerGroup.getId()); - } - - @Test - public void testQueryListPaging() { - //insertOne - WorkerGroup workerGroup = insertOne(); - //queryListPaging - Page page = new Page(1,3); - IPage workerGroupIPage = workerGroupMapper.queryListPaging(page, workerGroup.getName()); - Assert.assertNotEquals(workerGroupIPage.getTotal(), 0); - workerGroupMapper.deleteById(workerGroup.getId()); - } -} \ No newline at end of file diff --git a/escheduler-rpc/pom.xml b/escheduler-rpc/pom.xml deleted file mode 100644 index e3d7c91a92..0000000000 --- a/escheduler-rpc/pom.xml +++ /dev/null @@ -1,121 +0,0 @@ - - - - - escheduler - cn.analysys - 1.1.0-SNAPSHOT - - 4.0.0 - - escheduler-rpc - - escheduler-rpc - https://github.com/analysys/EasyScheduler - - - UTF-8 - 1.7 - 1.7 - - ${project.basedir}/src/main/java - 3.5.1 - 1.9.0 - - - - - com.google.protobuf - protobuf-java - ${protobuf.version} - - - io.grpc - grpc-netty - ${grpc.version} - - - io.grpc - grpc-protobuf - ${grpc.version} - - - io.grpc - grpc-stub - ${grpc.version} - - - - com.google.guava - guava - - - - - - - kr.motd.maven - os-maven-plugin - 1.5.0.Final - - - - - org.xolstice.maven.plugins - protobuf-maven-plugin - 0.5.0 - - com.google.protobuf:protoc:3.5.1-1:exe:${os.detected.classifier} - grpc-java - io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier} - - - - compile - - compile - - - - compile-custom - - compile-custom - - - ${protobuf.output.directory} - - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.7 - - - add-classes - 
generate-sources - - add-source - - - - ${protobuf.output.directory} - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - - ${java.version} - ${java.version} - ${project.build.sourceEncoding} - - - - - diff --git a/escheduler-rpc/src/main/java/cn/escheduler/rpc/LogViewServiceGrpc.java b/escheduler-rpc/src/main/java/cn/escheduler/rpc/LogViewServiceGrpc.java deleted file mode 100644 index 8723eef842..0000000000 --- a/escheduler-rpc/src/main/java/cn/escheduler/rpc/LogViewServiceGrpc.java +++ /dev/null @@ -1,499 +0,0 @@ -package cn.escheduler.rpc; - -import static io.grpc.MethodDescriptor.generateFullMethodName; -import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall; -import static io.grpc.stub.ClientCalls.asyncClientStreamingCall; -import static io.grpc.stub.ClientCalls.asyncServerStreamingCall; -import static io.grpc.stub.ClientCalls.asyncUnaryCall; -import static io.grpc.stub.ClientCalls.blockingServerStreamingCall; -import static io.grpc.stub.ClientCalls.blockingUnaryCall; -import static io.grpc.stub.ClientCalls.futureUnaryCall; -import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall; -import static io.grpc.stub.ServerCalls.asyncClientStreamingCall; -import static io.grpc.stub.ServerCalls.asyncServerStreamingCall; -import static io.grpc.stub.ServerCalls.asyncUnaryCall; -import static io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall; -import static io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall; - -/** - *
- **
- *  log view service
- * 
- */ -@javax.annotation.Generated( - value = "by gRPC proto compiler (version 1.9.0)", - comments = "Source: scheduler.proto") -public final class LogViewServiceGrpc { - - private LogViewServiceGrpc() {} - - public static final String SERVICE_NAME = "schduler.LogViewService"; - - // Static method descriptors that strictly reflect the proto. - @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") - @java.lang.Deprecated // Use {@link #getRollViewLogMethod()} instead. - public static final io.grpc.MethodDescriptor METHOD_ROLL_VIEW_LOG = getRollViewLogMethod(); - - private static volatile io.grpc.MethodDescriptor getRollViewLogMethod; - - @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") - public static io.grpc.MethodDescriptor getRollViewLogMethod() { - io.grpc.MethodDescriptor getRollViewLogMethod; - if ((getRollViewLogMethod = LogViewServiceGrpc.getRollViewLogMethod) == null) { - synchronized (LogViewServiceGrpc.class) { - if ((getRollViewLogMethod = LogViewServiceGrpc.getRollViewLogMethod) == null) { - LogViewServiceGrpc.getRollViewLogMethod = getRollViewLogMethod = - io.grpc.MethodDescriptor.newBuilder() - .setType(io.grpc.MethodDescriptor.MethodType.UNARY) - .setFullMethodName(generateFullMethodName( - "schduler.LogViewService", "rollViewLog")) - .setSampledToLocalTracing(true) - .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( - cn.escheduler.rpc.LogParameter.getDefaultInstance())) - .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( - cn.escheduler.rpc.RetStrInfo.getDefaultInstance())) - .setSchemaDescriptor(new LogViewServiceMethodDescriptorSupplier("rollViewLog")) - .build(); - } - } - } - return getRollViewLogMethod; - } - @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") - @java.lang.Deprecated // Use {@link #getViewLogMethod()} instead. 
- public static final io.grpc.MethodDescriptor METHOD_VIEW_LOG = getViewLogMethod(); - - private static volatile io.grpc.MethodDescriptor getViewLogMethod; - - @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") - public static io.grpc.MethodDescriptor getViewLogMethod() { - io.grpc.MethodDescriptor getViewLogMethod; - if ((getViewLogMethod = LogViewServiceGrpc.getViewLogMethod) == null) { - synchronized (LogViewServiceGrpc.class) { - if ((getViewLogMethod = LogViewServiceGrpc.getViewLogMethod) == null) { - LogViewServiceGrpc.getViewLogMethod = getViewLogMethod = - io.grpc.MethodDescriptor.newBuilder() - .setType(io.grpc.MethodDescriptor.MethodType.UNARY) - .setFullMethodName(generateFullMethodName( - "schduler.LogViewService", "viewLog")) - .setSampledToLocalTracing(true) - .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( - cn.escheduler.rpc.PathParameter.getDefaultInstance())) - .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( - cn.escheduler.rpc.RetStrInfo.getDefaultInstance())) - .setSchemaDescriptor(new LogViewServiceMethodDescriptorSupplier("viewLog")) - .build(); - } - } - } - return getViewLogMethod; - } - @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") - @java.lang.Deprecated // Use {@link #getGetLogBytesMethod()} instead. 
- public static final io.grpc.MethodDescriptor METHOD_GET_LOG_BYTES = getGetLogBytesMethod(); - - private static volatile io.grpc.MethodDescriptor getGetLogBytesMethod; - - @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") - public static io.grpc.MethodDescriptor getGetLogBytesMethod() { - io.grpc.MethodDescriptor getGetLogBytesMethod; - if ((getGetLogBytesMethod = LogViewServiceGrpc.getGetLogBytesMethod) == null) { - synchronized (LogViewServiceGrpc.class) { - if ((getGetLogBytesMethod = LogViewServiceGrpc.getGetLogBytesMethod) == null) { - LogViewServiceGrpc.getGetLogBytesMethod = getGetLogBytesMethod = - io.grpc.MethodDescriptor.newBuilder() - .setType(io.grpc.MethodDescriptor.MethodType.UNARY) - .setFullMethodName(generateFullMethodName( - "schduler.LogViewService", "getLogBytes")) - .setSampledToLocalTracing(true) - .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( - cn.escheduler.rpc.PathParameter.getDefaultInstance())) - .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( - cn.escheduler.rpc.RetByteInfo.getDefaultInstance())) - .setSchemaDescriptor(new LogViewServiceMethodDescriptorSupplier("getLogBytes")) - .build(); - } - } - } - return getGetLogBytesMethod; - } - - /** - * Creates a new async stub that supports all call types for the service - */ - public static LogViewServiceStub newStub(io.grpc.Channel channel) { - return new LogViewServiceStub(channel); - } - - /** - * Creates a new blocking-style stub that supports unary and streaming output calls on the service - */ - public static LogViewServiceBlockingStub newBlockingStub( - io.grpc.Channel channel) { - return new LogViewServiceBlockingStub(channel); - } - - /** - * Creates a new ListenableFuture-style stub that supports unary calls on the service - */ - public static LogViewServiceFutureStub newFutureStub( - io.grpc.Channel channel) { - return new LogViewServiceFutureStub(channel); - } - - /** - *
-   **
-   *  log view service
-   * 
- */ - public static abstract class LogViewServiceImplBase implements io.grpc.BindableService { - - /** - *
-     **
-     *  roll view log
-     * 
- */ - public void rollViewLog(cn.escheduler.rpc.LogParameter request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnimplementedUnaryCall(getRollViewLogMethod(), responseObserver); - } - - /** - *
-     **
-     * view all log
-     * 
- */ - public void viewLog(cn.escheduler.rpc.PathParameter request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnimplementedUnaryCall(getViewLogMethod(), responseObserver); - } - - /** - *
-     **
-     * get log bytes
-     * 
- */ - public void getLogBytes(cn.escheduler.rpc.PathParameter request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnimplementedUnaryCall(getGetLogBytesMethod(), responseObserver); - } - - @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { - return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) - .addMethod( - getRollViewLogMethod(), - asyncUnaryCall( - new MethodHandlers< - cn.escheduler.rpc.LogParameter, - cn.escheduler.rpc.RetStrInfo>( - this, METHODID_ROLL_VIEW_LOG))) - .addMethod( - getViewLogMethod(), - asyncUnaryCall( - new MethodHandlers< - cn.escheduler.rpc.PathParameter, - cn.escheduler.rpc.RetStrInfo>( - this, METHODID_VIEW_LOG))) - .addMethod( - getGetLogBytesMethod(), - asyncUnaryCall( - new MethodHandlers< - cn.escheduler.rpc.PathParameter, - cn.escheduler.rpc.RetByteInfo>( - this, METHODID_GET_LOG_BYTES))) - .build(); - } - } - - /** - *
-   **
-   *  log view service
-   * 
- */ - public static final class LogViewServiceStub extends io.grpc.stub.AbstractStub { - private LogViewServiceStub(io.grpc.Channel channel) { - super(channel); - } - - private LogViewServiceStub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected LogViewServiceStub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new LogViewServiceStub(channel, callOptions); - } - - /** - *
-     **
-     *  roll view log
-     * 
- */ - public void rollViewLog(cn.escheduler.rpc.LogParameter request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(getRollViewLogMethod(), getCallOptions()), request, responseObserver); - } - - /** - *
-     **
-     * view all log
-     * 
- */ - public void viewLog(cn.escheduler.rpc.PathParameter request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(getViewLogMethod(), getCallOptions()), request, responseObserver); - } - - /** - *
-     **
-     * get log bytes
-     * 
- */ - public void getLogBytes(cn.escheduler.rpc.PathParameter request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(getGetLogBytesMethod(), getCallOptions()), request, responseObserver); - } - } - - /** - *
-   **
-   *  log view service
-   * 
- */ - public static final class LogViewServiceBlockingStub extends io.grpc.stub.AbstractStub { - private LogViewServiceBlockingStub(io.grpc.Channel channel) { - super(channel); - } - - private LogViewServiceBlockingStub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected LogViewServiceBlockingStub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new LogViewServiceBlockingStub(channel, callOptions); - } - - /** - *
-     **
-     *  roll view log
-     * 
- */ - public cn.escheduler.rpc.RetStrInfo rollViewLog(cn.escheduler.rpc.LogParameter request) { - return blockingUnaryCall( - getChannel(), getRollViewLogMethod(), getCallOptions(), request); - } - - /** - *
-     **
-     * view all log
-     * 
- */ - public cn.escheduler.rpc.RetStrInfo viewLog(cn.escheduler.rpc.PathParameter request) { - return blockingUnaryCall( - getChannel(), getViewLogMethod(), getCallOptions(), request); - } - - /** - *
-     **
-     * get log bytes
-     * 
- */ - public cn.escheduler.rpc.RetByteInfo getLogBytes(cn.escheduler.rpc.PathParameter request) { - return blockingUnaryCall( - getChannel(), getGetLogBytesMethod(), getCallOptions(), request); - } - } - - /** - *
-   **
-   *  log view service
-   * 
- */ - public static final class LogViewServiceFutureStub extends io.grpc.stub.AbstractStub { - private LogViewServiceFutureStub(io.grpc.Channel channel) { - super(channel); - } - - private LogViewServiceFutureStub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected LogViewServiceFutureStub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new LogViewServiceFutureStub(channel, callOptions); - } - - /** - *
-     **
-     *  roll view log
-     * 
- */ - public com.google.common.util.concurrent.ListenableFuture rollViewLog( - cn.escheduler.rpc.LogParameter request) { - return futureUnaryCall( - getChannel().newCall(getRollViewLogMethod(), getCallOptions()), request); - } - - /** - *
-     **
-     * view all log
-     * 
- */ - public com.google.common.util.concurrent.ListenableFuture viewLog( - cn.escheduler.rpc.PathParameter request) { - return futureUnaryCall( - getChannel().newCall(getViewLogMethod(), getCallOptions()), request); - } - - /** - *
-     **
-     * get log bytes
-     * 
- */ - public com.google.common.util.concurrent.ListenableFuture getLogBytes( - cn.escheduler.rpc.PathParameter request) { - return futureUnaryCall( - getChannel().newCall(getGetLogBytesMethod(), getCallOptions()), request); - } - } - - private static final int METHODID_ROLL_VIEW_LOG = 0; - private static final int METHODID_VIEW_LOG = 1; - private static final int METHODID_GET_LOG_BYTES = 2; - - private static final class MethodHandlers implements - io.grpc.stub.ServerCalls.UnaryMethod, - io.grpc.stub.ServerCalls.ServerStreamingMethod, - io.grpc.stub.ServerCalls.ClientStreamingMethod, - io.grpc.stub.ServerCalls.BidiStreamingMethod { - private final LogViewServiceImplBase serviceImpl; - private final int methodId; - - MethodHandlers(LogViewServiceImplBase serviceImpl, int methodId) { - this.serviceImpl = serviceImpl; - this.methodId = methodId; - } - - @java.lang.Override - @java.lang.SuppressWarnings("unchecked") - public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { - switch (methodId) { - case METHODID_ROLL_VIEW_LOG: - serviceImpl.rollViewLog((cn.escheduler.rpc.LogParameter) request, - (io.grpc.stub.StreamObserver) responseObserver); - break; - case METHODID_VIEW_LOG: - serviceImpl.viewLog((cn.escheduler.rpc.PathParameter) request, - (io.grpc.stub.StreamObserver) responseObserver); - break; - case METHODID_GET_LOG_BYTES: - serviceImpl.getLogBytes((cn.escheduler.rpc.PathParameter) request, - (io.grpc.stub.StreamObserver) responseObserver); - break; - default: - throw new AssertionError(); - } - } - - @java.lang.Override - @java.lang.SuppressWarnings("unchecked") - public io.grpc.stub.StreamObserver invoke( - io.grpc.stub.StreamObserver responseObserver) { - switch (methodId) { - default: - throw new AssertionError(); - } - } - } - - private static abstract class LogViewServiceBaseDescriptorSupplier - implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { - 
LogViewServiceBaseDescriptorSupplier() {} - - @java.lang.Override - public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { - return cn.escheduler.rpc.SchdulerProto.getDescriptor(); - } - - @java.lang.Override - public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { - return getFileDescriptor().findServiceByName("LogViewService"); - } - } - - private static final class LogViewServiceFileDescriptorSupplier - extends LogViewServiceBaseDescriptorSupplier { - LogViewServiceFileDescriptorSupplier() {} - } - - private static final class LogViewServiceMethodDescriptorSupplier - extends LogViewServiceBaseDescriptorSupplier - implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { - private final String methodName; - - LogViewServiceMethodDescriptorSupplier(String methodName) { - this.methodName = methodName; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { - return getServiceDescriptor().findMethodByName(methodName); - } - } - - private static volatile io.grpc.ServiceDescriptor serviceDescriptor; - - public static io.grpc.ServiceDescriptor getServiceDescriptor() { - io.grpc.ServiceDescriptor result = serviceDescriptor; - if (result == null) { - synchronized (LogViewServiceGrpc.class) { - result = serviceDescriptor; - if (result == null) { - serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) - .setSchemaDescriptor(new LogViewServiceFileDescriptorSupplier()) - .addMethod(getRollViewLogMethod()) - .addMethod(getViewLogMethod()) - .addMethod(getGetLogBytesMethod()) - .build(); - } - } - } - return result; - } -} diff --git a/escheduler-rpc/src/main/proto/scheduler.proto b/escheduler-rpc/src/main/proto/scheduler.proto deleted file mode 100644 index 7f1129740d..0000000000 --- a/escheduler-rpc/src/main/proto/scheduler.proto +++ /dev/null @@ -1,83 +0,0 @@ -syntax = "proto3"; - -package schduler; - -option java_multiple_files = true; -option 
java_package = "cn.escheduler.rpc"; -option java_outer_classname = "SchdulerProto"; - - -/** - * return str info - */ -message RetStrInfo { - /** - * str msg info - */ - string msg = 1 ; -} - -/** - * return byte info - */ -message RetByteInfo { - /** - * byte data info - */ - bytes data = 1; -} - -/** - * log parameter - */ -message LogParameter { - - /** - * path - */ - string path = 1 ; - - /** - * skip line num - */ - int32 skipLineNum = 2 ; - - /** - * display limt num - */ - int32 limit = 3 ; -} - - -/** - * path parameter - */ -message PathParameter { - - /** - * path - */ - string path = 1 ; -} - -/** - * log view service - */ -service LogViewService { - - /** - * roll view log - */ - rpc rollViewLog(LogParameter) returns (RetStrInfo) {}; - - /** - * view all log - */ - rpc viewLog(PathParameter) returns (RetStrInfo) {}; - - /** - * get log bytes - */ - rpc getLogBytes(PathParameter) returns (RetByteInfo) {}; -} - diff --git a/escheduler-server/pom.xml b/escheduler-server/pom.xml deleted file mode 100644 index 7f149278c8..0000000000 --- a/escheduler-server/pom.xml +++ /dev/null @@ -1,127 +0,0 @@ - - 4.0.0 - - escheduler - cn.analysys - 1.1.0-SNAPSHOT - - escheduler-server - escheduler-server - http://maven.apache.org - jar - - UTF-8 - - - - - cn.analysys - escheduler-common - - - protobuf-java - com.google.protobuf - - - io.netty - netty - - - io.netty - netty-all - - - com.google - netty - - - log4j-slf4j-impl - org.apache.logging.log4j - - - - - cn.analysys - escheduler-dao - - - spring-boot-starter-logging - org.springframework.boot - - - - - - cn.analysys - escheduler-rpc - - - org.apache.curator - curator-framework - - - org.apache.zookeeper - zookeeper - - - - - org.apache.curator - curator-recipes - - - - org.apache.httpcomponents - httpclient - - - org.apache.httpcomponents - httpcore - - - junit - junit - test - - - - cn.analysys - escheduler-alert - - - - - - - - - maven-assembly-plugin - - - src/main/assembly/package.xml - - false - - - - 
make-assembly - package - - single - - - - - - org.apache.maven.plugins - maven-compiler-plugin - - ${java.version} - ${java.version} - ${project.build.sourceEncoding} - - - - - - diff --git a/escheduler-server/src/main/assembly/package.xml b/escheduler-server/src/main/assembly/package.xml deleted file mode 100644 index 7a8188c05b..0000000000 --- a/escheduler-server/src/main/assembly/package.xml +++ /dev/null @@ -1,74 +0,0 @@ - - cluster - - dir - - false - - - src/main/resources - - **/*.properties - **/*.xml - **/*.json - - conf - - - ${project.parent.basedir}/escheduler-common/src/main/resources - - **/*.properties - **/*.xml - **/*.json - - conf - - - ${project.parent.basedir}/escheduler-common/src/main/resources/bin - - *.* - - 755 - bin - - - ${project.parent.basedir}/escheduler-dao/src/main/resources - - **/*.properties - **/*.xml - **/*.json - - conf - - - ${project.parent.basedir}/escheduler-api/src/main/resources - - **/*.properties - **/*.xml - **/*.json - - conf - - - target/ - - escheduler-server-${project.version}.jar - - lib - - - - - lib - true - - javax.servlet:servlet-api - org.eclipse.jetty.aggregate:jetty-all - org.slf4j:slf4j-log4j12 - - - - \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/master/AbstractServer.java b/escheduler-server/src/main/java/cn/escheduler/server/master/AbstractServer.java deleted file mode 100644 index a3db90b010..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/master/AbstractServer.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.master; - -import cn.escheduler.common.IStoppable; -import org.apache.commons.configuration.Configuration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.CommandLineRunner; -import org.springframework.context.annotation.ComponentScan; - -/** - * master server - */ -@ComponentScan("cn.escheduler") -public abstract class AbstractServer implements CommandLineRunner, IStoppable { - - private static final Logger logger = LoggerFactory.getLogger(AbstractServer.class); - - /** - * conf - */ - protected static Configuration conf; - - /** - * object lock - */ - protected final Object lock = new Object(); - - /** - * whether or not to close the state - */ - protected boolean terminated = false; - - - /** - * heartbeat interval, unit second - */ - protected int heartBeatInterval; - - - - /** - * blocking implement - * @throws InterruptedException - */ - public void awaitTermination() throws InterruptedException { - synchronized (lock) { - while (!terminated) { - lock.wait(); - } - } - } - - - /** - * Callback used to run the bean. - * @param args incoming main method arguments - * @throws Exception on error - */ - @Override - public abstract void run(String... 
args) throws Exception; - - /** - * gracefully stop - * @param cause why stopping - */ - @Override - public abstract void stop(String cause); -} - diff --git a/escheduler-server/src/main/java/cn/escheduler/server/master/MasterServer.java b/escheduler-server/src/main/java/cn/escheduler/server/master/MasterServer.java deleted file mode 100644 index d68b181660..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/master/MasterServer.java +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.master; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.common.thread.ThreadPoolExecutors; -import cn.escheduler.common.thread.ThreadUtils; -import cn.escheduler.common.utils.OSUtils; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.server.master.runner.MasterSchedulerThread; -import cn.escheduler.server.quartz.ProcessScheduleJob; -import cn.escheduler.server.quartz.QuartzExecutors; -import cn.escheduler.server.zk.ZKMasterClient; -import org.apache.commons.configuration.ConfigurationException; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.apache.commons.lang3.StringUtils; -import org.quartz.SchedulerException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.SpringApplication; -import org.springframework.boot.WebApplicationType; -import org.springframework.boot.builder.SpringApplicationBuilder; -import org.springframework.context.annotation.ComponentScan; - -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -/** - * master server - */ -@ComponentScan("cn.escheduler") -public class MasterServer extends AbstractServer { - - private static final Logger logger = LoggerFactory.getLogger(MasterServer.class); - - /** - * zk master client - */ - private static ZKMasterClient zkMasterClient = null; - - /** - * heartbeat thread pool - */ - private ScheduledExecutorService heartbeatMasterService; - - /** - * escheduler database interface - */ - @Autowired - protected ProcessDao processDao; - - /** - * master exec thread pool - */ - private ExecutorService masterSchedulerService; - - public MasterServer(){} - - public MasterServer(ProcessDao processDao){ - try { - conf = new PropertiesConfiguration(Constants.MASTER_PROPERTIES_PATH); - }catch 
(ConfigurationException e){ - logger.error("load configuration failed : " + e.getMessage(),e); - System.exit(1); - } - zkMasterClient = ZKMasterClient.getZKMasterClient(processDao); - this.masterSchedulerService = ThreadUtils.newDaemonSingleThreadExecutor("Master-Scheduler-Thread"); - } - - - /** - * master server startup - * - * master server not use web service - */ - public static void main(String[] args) { - SpringApplication app = new SpringApplication(MasterServer.class); - - app.run(args); - } - - - @Override - public void run(String... strings) throws Exception { - - MasterServer masterServer = new MasterServer(processDao); - - masterServer.run(processDao); - - logger.info("master server started"); - // blocking - masterServer.awaitTermination(); - } - - - public void run(ProcessDao processDao){ - - // heartbeat interval - heartBeatInterval = conf.getInt(Constants.MASTER_HEARTBEAT_INTERVAL, - Constants.defaultMasterHeartbeatInterval); - - // master exec thread pool num - int masterExecThreadNum = conf.getInt(Constants.MASTER_EXEC_THREADS, - Constants.defaultMasterExecThreadNum); - - - heartbeatMasterService = ThreadUtils.newDaemonThreadScheduledExecutor("Master-Main-Thread",Constants.defaulMasterHeartbeatThreadNum); - - // heartbeat thread implement - Runnable heartBeatThread = heartBeatThread(); - - zkMasterClient.setStoppable(this); - - // regular heartbeat - // delay 5 seconds, send heartbeat every 30 seconds - heartbeatMasterService. 
- scheduleAtFixedRate(heartBeatThread, 5, heartBeatInterval, TimeUnit.SECONDS); - - // master scheduler thread - MasterSchedulerThread masterSchedulerThread = new MasterSchedulerThread( - zkMasterClient, - processDao,conf, - masterExecThreadNum); - - // submit master scheduler thread - masterSchedulerService.execute(masterSchedulerThread); - - // start QuartzExecutors - // what system should do if exception - try { - ProcessScheduleJob.init(processDao); - QuartzExecutors.getInstance().start(); - } catch (Exception e) { - try { - QuartzExecutors.getInstance().shutdown(); - } catch (SchedulerException e1) { - logger.error("QuartzExecutors shutdown failed : " + e1.getMessage(), e1); - } - logger.error("start Quartz failed : " + e.getMessage(), e); - } - - - /** - * register hooks, which are called before the process exits - */ - Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { - @Override - public void run() { - logger.info("master server stopped"); - if (zkMasterClient.getActiveMasterNum() <= 1) { - for (int i = 0; i < Constants.ESCHEDULER_WARN_TIMES_FAILOVER;i++) { - zkMasterClient.getAlertDao().sendServerStopedAlert( - 1, OSUtils.getHost(), "Master-Server"); - } - } - } - })); - } - - - /** - * gracefully stop - * @param cause why stopping - */ - @Override - public synchronized void stop(String cause) { - - try { - //execute only once - if(Stopper.isStoped()){ - return; - } - - logger.info("master server is stopping ..., cause : {}", cause); - - // set stop signal is true - Stopper.stop(); - - try { - //thread sleep 3 seconds for thread quitely stop - Thread.sleep(3000L); - }catch (Exception e){ - logger.warn("thread sleep exception:" + e.getMessage(), e); - } - try { - heartbeatMasterService.shutdownNow(); - }catch (Exception e){ - logger.warn("heartbeat service stopped exception"); - } - - logger.info("heartbeat service stopped"); - - //close quartz - try{ - QuartzExecutors.getInstance().shutdown(); - }catch (Exception e){ - logger.warn("Quartz 
service stopped exception:{}",e.getMessage()); - } - - logger.info("Quartz service stopped"); - - try { - ThreadPoolExecutors.getInstance().shutdown(); - }catch (Exception e){ - logger.warn("threadpool service stopped exception:{}",e.getMessage()); - } - - logger.info("threadpool service stopped"); - - try { - masterSchedulerService.shutdownNow(); - }catch (Exception e){ - logger.warn("master scheduler service stopped exception:{}",e.getMessage()); - } - - logger.info("master scheduler service stopped"); - - try { - zkMasterClient.close(); - }catch (Exception e){ - logger.warn("zookeeper service stopped exception:{}",e.getMessage()); - } - - logger.info("zookeeper service stopped"); - - synchronized (lock) { - terminated = true; - lock.notifyAll(); - } - - } catch (Exception e) { - logger.error("master server stop exception : " + e.getMessage(), e); - System.exit(-1); - } - } - - - /** - * heartbeat thread implement - * @return - */ - private Runnable heartBeatThread(){ - Runnable heartBeatThread = new Runnable() { - @Override - public void run() { - if(Stopper.isRunning()) { - // send heartbeat to zk - if (StringUtils.isBlank(zkMasterClient.getMasterZNode())) { - logger.error("master send heartbeat to zk failed: can't find zookeeper path of master server"); - return; - } - - zkMasterClient.heartBeatForZk(zkMasterClient.getMasterZNode(), Constants.MASTER_PREFIX); - } - } - }; - return heartBeatThread; - } -} - diff --git a/escheduler-server/src/main/java/cn/escheduler/server/master/log/MasterLogFilter.java b/escheduler-server/src/main/java/cn/escheduler/server/master/log/MasterLogFilter.java deleted file mode 100644 index fdacd6d7fe..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/master/log/MasterLogFilter.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.master.log; - -import ch.qos.logback.classic.Level; -import ch.qos.logback.classic.spi.ILoggingEvent; -import ch.qos.logback.core.filter.Filter; -import ch.qos.logback.core.spi.FilterReply; - -/** - * master log filter - */ -public class MasterLogFilter extends Filter { - - Level level; - - @Override - public FilterReply decide(ILoggingEvent event) { - if (event.getThreadName().startsWith("Master-")){ - return FilterReply.ACCEPT; - } - return FilterReply.DENY; - } - - public void setLevel(String level) { - this.level = Level.toLevel(level); - } -} \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterBaseTaskExecThread.java b/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterBaseTaskExecThread.java deleted file mode 100644 index b323b3ee99..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterBaseTaskExecThread.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.master.runner; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.queue.ITaskQueue; -import cn.escheduler.common.queue.TaskQueueFactory; -import cn.escheduler.dao.AlertDao; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.dao.utils.BeanContext; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.configuration.ConfigurationException; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.concurrent.Callable; - -/** - * master task exec base class - */ -public class MasterBaseTaskExecThread implements Callable { - - private static final Logger logger = LoggerFactory.getLogger(MasterBaseTaskExecThread.class); - - /** - * process dao - */ - protected ProcessDao processDao; - - /** - * alert database access - */ - protected AlertDao alertDao; - - /** - * process instance - */ - protected ProcessInstance processInstance; - - /** - * task instance - */ - protected TaskInstance taskInstance; - - /** - * task queue - */ - protected ITaskQueue taskQueue; - protected boolean cancel; - - /** - * load configuration file - */ - private static Configuration conf; - - static { - try { - conf = new 
PropertiesConfiguration(Constants.MASTER_PROPERTIES_PATH); - } catch (ConfigurationException e) { - logger.error(e.getMessage(), e); - System.exit(1); - } - } - - public MasterBaseTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){ - this.processDao = BeanContext.getBean(ProcessDao.class); - this.alertDao = BeanContext.getBean(AlertDao.class); - this.processInstance = processInstance; - this.taskQueue = TaskQueueFactory.getTaskQueueInstance(); - this.cancel = false; - this.taskInstance = taskInstance; - } - - public TaskInstance getTaskInstance(){ - return this.taskInstance; - } - - public void kill(){ - this.cancel = true; - } - - protected TaskInstance submit(){ - Integer commitRetryTimes = conf.getInt(Constants.MASTER_COMMIT_RETRY_TIMES, - Constants.defaultMasterCommitRetryTimes); - Integer commitRetryInterval = conf.getInt(Constants.MASTER_COMMIT_RETRY_INTERVAL, - Constants.defaultMasterCommitRetryInterval); - - int retryTimes = 1; - - while (retryTimes <= commitRetryTimes){ - try { - TaskInstance task = processDao.submitTask(taskInstance, processInstance); - if(task != null){ - return task; - } - logger.error("task commit to mysql and queue failed , task has already retry {} times, please check the database", commitRetryTimes); - Thread.sleep(commitRetryInterval); - } catch (Exception e) { - logger.error("task commit to mysql and queue failed : " + e.getMessage(),e); - } - retryTimes += 1; - } - return null; - } - - protected Boolean submitWaitComplete(){ - return true; - } - - @Override - public Boolean call() throws Exception { - return submitWaitComplete(); - } - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterExecThread.java b/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterExecThread.java deleted file mode 100644 index 613d9b12c6..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterExecThread.java +++ /dev/null @@ -1,1042 +0,0 @@ 
-/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.master.runner; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.*; -import cn.escheduler.common.graph.DAG; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.model.TaskNodeRelation; -import cn.escheduler.common.process.ProcessDag; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.common.thread.ThreadUtils; -import cn.escheduler.common.utils.*; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.dao.utils.DagHelper; -import cn.escheduler.server.utils.AlertManager; -import com.alibaba.fastjson.JSONObject; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.configuration.ConfigurationException; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.util.*; -import 
java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; - -import static cn.escheduler.common.Constants.*; - -/** - * master exec thread,split dag - */ -public class MasterExecThread implements Runnable { - - private static final Logger logger = LoggerFactory.getLogger(MasterExecThread.class); - - /** - * process instance - */ - private ProcessInstance processInstance; - - - /** - * runing TaskNode - */ - private final Map> activeTaskNode = new ConcurrentHashMap>(); - - private final ExecutorService taskExecService; - - /** - * submit failure nodes - */ - private Boolean taskFailedSubmit = false; - private List recoverNodeIdList = new ArrayList<>(); - private Map errorTaskList = new ConcurrentHashMap<>(); - private Map completeTaskList = new ConcurrentHashMap<>(); - private Map readyToSubmitTaskList = new ConcurrentHashMap<>(); - private Map dependFailedTask = new ConcurrentHashMap<>(); - private Map forbiddenTaskList = new ConcurrentHashMap<>(); - private List recoverToleranceFaultTaskList = new ArrayList<>(); - - private AlertManager alertManager = new AlertManager(); - - private DAG dag; - - /** - * process dao - */ - private ProcessDao processDao; - - /** - * load configuration file - */ - private static Configuration conf; - - public MasterExecThread(ProcessInstance processInstance){ - this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); - - this.processInstance = processInstance; - - int masterTaskExecNum = conf.getInt(Constants.MASTER_EXEC_TASK_THREADS, - Constants.defaultMasterTaskExecNum); - this.taskExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Task-Exec-Thread", - masterTaskExecNum); - } - - - static { - try { - conf = new PropertiesConfiguration(Constants.MASTER_PROPERTIES_PATH); - }catch (ConfigurationException e){ - logger.error("load configuration failed : " + e.getMessage(),e); - System.exit(1); - } - } - - @Override - public void run() { - - // process 
instance is null - if (processInstance == null){ - logger.info("process instance is not exists"); - return; - } - - // check to see if it's done - if (processInstance.getState().typeIsFinished()){ - logger.info("process instance is done : {}",processInstance.getId()); - return; - } - - try { - if (processInstance.isComplementData() && Flag.NO == processInstance.getIsSubProcess()){ - // sub process complement data - executeComplementProcess(); - }else{ - // execute flow - executeProcess(); - } - }catch (Exception e){ - logger.error("master exec thread exception: " + e.getMessage(), e); - logger.error("process execute failed, process id:{}", processInstance.getId()); - processInstance.setState(ExecutionStatus.FAILURE); - processInstance.setEndTime(new Date()); - processDao.updateProcessInstance(processInstance); - }finally { - taskExecService.shutdown(); - // post handle - postHandle(); - } - } - - private void executeProcess() throws Exception { - prepareProcess(); - runProcess(); - endProcess(); - } - - /** - * execute complement process - * @throws Exception - */ - private void executeComplementProcess() throws Exception { - - Map cmdParam = JSONUtils.toMap(processInstance.getCommandParam()); - - Date startDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); - Date endDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); - processDao.saveProcessInstance(processInstance); - Date scheduleDate = processInstance.getScheduleTime(); - - if(scheduleDate == null){ - scheduleDate = startDate; - } - - while(Stopper.isRunning()){ - // prepare dag and other info - prepareProcess(); - - if(dag == null){ - logger.error("process {} dag is null, please check out parameters", - processInstance.getId()); - processInstance.setState(ExecutionStatus.SUCCESS); - processDao.updateProcessInstance(processInstance); - return; - } - - // execute process ,waiting for end - runProcess(); - - // process instace failure ,no more 
complements - if(!processInstance.getState().typeIsSuccess()){ - logger.info("process {} state {}, complement not completely!", - processInstance.getId(), processInstance.getState()); - break; - } - - // current process instance sucess ,next execute - scheduleDate = DateUtils.getSomeDay(scheduleDate, 1); - if(scheduleDate.after(endDate)){ - // all success - logger.info("process {} complement completely!", processInstance.getId()); - break; - } - - logger.info("process {} start to complement {} data", - processInstance.getId(), DateUtils.dateToString(scheduleDate)); - // execute next process instance complement data - processInstance.setScheduleTime(scheduleDate); - if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)){ - cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); - processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); - } - - List taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId()); - for(TaskInstance taskInstance : taskInstanceList){ - taskInstance.setFlag(Flag.NO); - processDao.updateTaskInstance(taskInstance); - } - processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); - processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( - processInstance.getProcessDefinition().getGlobalParamMap(), - processInstance.getProcessDefinition().getGlobalParamList(), - CommandType.COMPLEMENT_DATA,processInstance.getScheduleTime())); - - processDao.saveProcessInstance(processInstance); - } - - // flow end - endProcess(); - - } - - - /** - * prepare process parameter - * @throws Exception - */ - private void prepareProcess() throws Exception { - // init task queue - initTaskQueue(); - - // gen process dag - buildFlowDag(); - logger.info("prepare process :{} end", processInstance.getId()); - } - - - /** - * process end handle - */ - private void endProcess() { - processInstance.setEndTime(new Date()); - processDao.updateProcessInstance(processInstance); - 
if(processInstance.getState().typeIsWaittingThread()){ - processDao.createRecoveryWaitingThreadCommand(null, processInstance); - } - List taskInstances = processDao.findValidTaskListByProcessId(processInstance.getId()); - alertManager.sendAlertProcessInstance(processInstance, taskInstances); - } - - - /** - * generate process dag - * @throws Exception - */ - private void buildFlowDag() throws Exception { - recoverNodeIdList = getStartTaskInstanceList(processInstance.getCommandParam()); - - forbiddenTaskList = DagHelper.getForbiddenTaskNodeMaps(processInstance.getProcessInstanceJson()); - // generate process to get DAG info - List recoveryNameList = getRecoveryNodeNameList(); - List startNodeNameList = parseStartNodeName(processInstance.getCommandParam()); - ProcessDag processDag = generateFlowDag(processInstance.getProcessInstanceJson(), - startNodeNameList, recoveryNameList, processInstance.getTaskDependType()); - if(processDag == null){ - //TODO... - logger.error("processDag is null"); - return; - } - // generate process dag - dag = DagHelper.buildDagGraph(processDag); - - } - - private void initTaskQueue(){ - - taskFailedSubmit = false; - activeTaskNode.clear(); - dependFailedTask.clear(); - completeTaskList.clear(); - errorTaskList.clear(); - List taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId()); - for(TaskInstance task : taskInstanceList){ - if(task.isTaskComplete()){ - completeTaskList.put(task.getName(), task); - } - if(task.getState().typeIsFailure() && !task.taskCanRetry()){ - errorTaskList.put(task.getName(), task); - } - } - } - - /** - * process post handle - */ - private void postHandle() { - logger.info("develop mode is: {}", CommonUtils.isDevelopMode()); - - if (!CommonUtils.isDevelopMode()) { - // get exec dir - String execLocalPath = cn.escheduler.common.utils.FileUtils - .getProcessExecDir(processInstance.getProcessDefinition().getProjectId(), - processInstance.getProcessDefinitionId(), - 
processInstance.getId()); - - try { - FileUtils.deleteDirectory(new File(execLocalPath)); - } catch (IOException e) { - logger.error("delete exec dir failed : " + e.getMessage(), e); - } - } - } - - - - /** - * submit task to execute - * @param taskInstance - */ - private TaskInstance submitTaskExec(TaskInstance taskInstance) { - MasterBaseTaskExecThread abstractExecThread = null; - if(taskInstance.isSubProcess()){ - abstractExecThread = new SubProcessTaskExecThread(taskInstance, processInstance); - }else { - abstractExecThread = new MasterTaskExecThread(taskInstance, processInstance); - } - Future future = taskExecService.submit(abstractExecThread); - activeTaskNode.putIfAbsent(abstractExecThread, future); - return abstractExecThread.getTaskInstance(); - } - - /** - * find task instance in db. - * in case submit more than one same name task in the same time. - * @param taskName - * @return - */ - private TaskInstance findTaskIfExists(String taskName){ - List taskInstanceList = processDao.findValidTaskListByProcessId(this.processInstance.getId()); - for(TaskInstance taskInstance : taskInstanceList){ - if(taskInstance.getName().equals(taskName)){ - return taskInstance; - } - } - return null; - } - - /** - * encapsulation task - * @param processInstance - * @param nodeName - * @return - */ - private TaskInstance createTaskInstance(ProcessInstance processInstance, String nodeName, - TaskNode taskNode, String parentNodeName) { - - TaskInstance taskInstance = findTaskIfExists(nodeName); - if(taskInstance == null){ - taskInstance = new TaskInstance(); - // task name - taskInstance.setName(nodeName); - // process instance define id - taskInstance.setProcessDefinitionId(processInstance.getProcessDefinitionId()); - // task instance state - taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); - // process instance id - taskInstance.setProcessInstanceId(processInstance.getId()); - // task instance node json - taskInstance.setTaskJson(JSONObject.toJSONString(taskNode)); - 
// task instance type - taskInstance.setTaskType(taskNode.getType()); - // task instance whether alert - taskInstance.setAlertFlag(Flag.NO); - - // task instance start time - taskInstance.setStartTime(new Date()); - - // task instance flag - taskInstance.setFlag(Flag.YES); - - // task instance retry times - taskInstance.setRetryTimes(0); - - // max task instance retry times - taskInstance.setMaxRetryTimes(taskNode.getMaxRetryTimes()); - - // retry task instance interval - taskInstance.setRetryInterval(taskNode.getRetryInterval()); - - // task instance priority - if(taskNode.getTaskInstancePriority() == null){ - taskInstance.setTaskInstancePriority(Priority.MEDIUM); - }else{ - taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority()); - } - - int workerGroupId = taskNode.getWorkerGroupId(); - taskInstance.setWorkerGroupId(workerGroupId); - - } - return taskInstance; - } - - - - /** - * get post task instance by node - * - * @param dag - * @param parentNodeName - * @return - */ - private List getPostTaskInstanceByNode(DAG dag, String parentNodeName){ - - List postTaskList = new ArrayList<>(); - Collection startVertex = DagHelper.getStartVertex(parentNodeName, dag, completeTaskList); - if(startVertex == null){ - return postTaskList; - } - - for (String nodeName : startVertex){ - // encapsulation task instance - TaskInstance taskInstance = createTaskInstance(processInstance, nodeName , - dag.getNode(nodeName),parentNodeName); - postTaskList.add(taskInstance); - } - return postTaskList; - } - - /** - * - * return start task node list - * - * @return - */ - private List getStartSubmitTaskList(){ - - List startTaskList = getPostTaskInstanceByNode(dag, null); - - HashMap successTaskMaps = new HashMap<>(); - List resultList = new ArrayList<>(); - while(Stopper.isRunning()){ - for(TaskInstance task : startTaskList){ - if(task.getState().typeIsSuccess()){ - successTaskMaps.put(task.getName(), task); - }else if(!completeTaskList.containsKey(task.getName()) && 
!errorTaskList.containsKey(task.getName())){ - resultList.add(task); - } - } - startTaskList.clear(); - if(successTaskMaps.size() == 0){ - break; - } - - Set taskNameKeys = successTaskMaps.keySet(); - for(String taskName : taskNameKeys){ - startTaskList.addAll(getPostTaskInstanceByNode(dag, taskName)); - } - successTaskMaps.clear(); - } - return resultList; - } - - /** - * submit post node - * @param parentNodeName - */ - private void submitPostNode(String parentNodeName){ - - List submitTaskList = null; - if(parentNodeName == null){ - submitTaskList = getStartSubmitTaskList(); - }else{ - submitTaskList = getPostTaskInstanceByNode(dag, parentNodeName); - } - // if previous node success , post node submit - for(TaskInstance task : submitTaskList){ - if(readyToSubmitTaskList.containsKey(task.getName())){ - continue; - } - - if(completeTaskList.containsKey(task.getName())){ - logger.info("task {} has already run success", task.getName()); - continue; - } - if(task.getState().typeIsPause() || task.getState().typeIsCancel()){ - logger.info("task {} stopped, the state is {}", task.getName(), task.getState().toString()); - }else{ - addTaskToStandByList(task); - } - } - } - - /** - * determine whether the dependencies of the task node are complete - * @return - */ - private DependResult isTaskDepsComplete(String taskName) { - - Collection startNodes = dag.getBeginNode(); - // ff the vertex returns true directly - if(startNodes.contains(taskName)){ - return DependResult.SUCCESS; - } - - TaskNode taskNode = dag.getNode(taskName); - List depsNameList = taskNode.getDepList(); - for(String depsNode : depsNameList ){ - - if(forbiddenTaskList.containsKey(depsNode)){ - continue; - } - // dependencies must be fully completed - if(!completeTaskList.containsKey(depsNode)){ - return DependResult.WAITING; - } - ExecutionStatus taskState = completeTaskList.get(depsNode).getState(); - if(taskState.typeIsFailure()){ - return DependResult.FAILED; - } - if(taskState.typeIsPause() || 
taskState.typeIsCancel()){ - return DependResult.WAITING; - } - } - - logger.info("taskName: {} completeDependTaskList: {}", taskName, Arrays.toString(completeTaskList.keySet().toArray())); - - return DependResult.SUCCESS; - } - - - /** - * query task instance by complete state - * @param state - * @return - */ - private List getCompleteTaskByState(ExecutionStatus state){ - List resultList = new ArrayList<>(); - Set taskList = completeTaskList.keySet(); - for(String taskName : taskList){ - TaskInstance taskInstance = completeTaskList.get(taskName); - if(taskInstance.getState() == state){ - resultList.add(taskInstance); - } - } - return resultList; - } - - /** - * where there are ongoing tasks - * @param state - * @return - */ - private ExecutionStatus runningState(ExecutionStatus state){ - if(state == ExecutionStatus.READY_STOP || - state == ExecutionStatus.READY_PAUSE || - state == ExecutionStatus.WAITTING_THREAD){ - // if the running task is not completed, the state remains unchanged - return state; - }else{ - return ExecutionStatus.RUNNING_EXEUTION; - } - } - - /** - * exists failure task , contains submit failure、dependency failure,execute failure(retry after) - * - * @return - */ - private Boolean hasFailedTask(){ - - if(this.taskFailedSubmit){ - return true; - } - if(this.errorTaskList.size() > 0){ - return true; - } - return this.dependFailedTask.size() > 0; - } - - /** - * process instance failure - * - * @return - */ - private Boolean processFailed(){ - if(hasFailedTask()) { - if(processInstance.getFailureStrategy() == FailureStrategy.END){ - return true; - } - if (processInstance.getFailureStrategy() == FailureStrategy.CONTINUE) { - return readyToSubmitTaskList.size() == 0 || activeTaskNode.size() == 0; - } - } - return false; - } - - /** - * whether task for waiting thread - * @return - */ - private Boolean hasWaitingThreadTask(){ - - List waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD); - return waitingList.size() > 0; - } - - /** 
- * prepare for pause - * 1,failed retry task in the preparation queue , returns to failure directly - * 2,exists pause task,complement not completed, pending submission of tasks, return to suspension - * 3,success - * @return - */ - private ExecutionStatus processReadyPause(){ - if(hasRetryTaskInStandBy()){ - return ExecutionStatus.FAILURE; - } - - List pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE); - if(pauseList.size() > 0 - || !isComplementEnd() - || readyToSubmitTaskList.size() > 0){ - return ExecutionStatus.PAUSE; - }else{ - return ExecutionStatus.SUCCESS; - } - } - - - /** - * generate the latest process instance status by the tasks state - * @return - */ - private ExecutionStatus getProcessInstanceState(){ - ProcessInstance instance = processDao.findProcessInstanceById(processInstance.getId()); - ExecutionStatus state = instance.getState(); - - if(activeTaskNode.size() > 0){ - return runningState(state); - } - // process failure - if(processFailed()){ - return ExecutionStatus.FAILURE; - } - - // waiting thread - if(hasWaitingThreadTask()){ - return ExecutionStatus.WAITTING_THREAD; - } - - // pause - if(state == ExecutionStatus.READY_PAUSE){ - return processReadyPause(); - } - - // stop - if(state == ExecutionStatus.READY_STOP){ - List stopList = getCompleteTaskByState(ExecutionStatus.STOP); - List killList = getCompleteTaskByState(ExecutionStatus.KILL); - if(stopList.size() > 0 || killList.size() > 0 || !isComplementEnd()){ - return ExecutionStatus.STOP; - }else{ - return ExecutionStatus.SUCCESS; - } - } - - // success - if(state == ExecutionStatus.RUNNING_EXEUTION){ - if(readyToSubmitTaskList.size() > 0){ - //tasks currently pending submission, no retries, indicating that depend is waiting to complete - return ExecutionStatus.RUNNING_EXEUTION; - }else{ - // if the waiting queue is empty and the status is in progress, then success - return ExecutionStatus.SUCCESS; - } - } - - return state; - } - - /** - * whether complement end - * @return - */ 
- private Boolean isComplementEnd() { - if(!processInstance.isComplementData()){ - return true; - } - - try { - Map cmdParam = JSONUtils.toMap(processInstance.getCommandParam()); - Date endTime = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); - return processInstance.getScheduleTime().equals(endTime); - } catch (Exception e) { - logger.error("complement end failed : " + e.getMessage(),e); - return false; - } - } - - /** - * updateProcessInstance process instance state - * after each batch of tasks is executed, the status of the process instance is updated - */ - private void updateProcessInstanceState() { - ExecutionStatus state = getProcessInstanceState(); - if(processInstance.getState() != state){ - logger.info( - "work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}", - processInstance.getId(), processInstance.getName(), - processInstance.getState().toString(), state.toString(), - processInstance.getCommandType().toString()); - processInstance.setState(state); - ProcessInstance instance = processDao.findProcessInstanceById(processInstance.getId()); - instance.setState(state); - instance.setProcessDefinition(processInstance.getProcessDefinition()); - processDao.updateProcessInstance(instance); - processInstance = instance; - } - } - - /** - * get task dependency result - * @param taskInstance - * @return - */ - private DependResult getDependResultForTask(TaskInstance taskInstance){ - DependResult inner = isTaskDepsComplete(taskInstance.getName()); - return inner; - } - - /** - * add task to standy list - * @param taskInstance - */ - private void addTaskToStandByList(TaskInstance taskInstance){ - logger.info("add task to stand by list: {}", taskInstance.getName()); - readyToSubmitTaskList.putIfAbsent(taskInstance.getName(), taskInstance); - } - - /** - * remove task from stand by list - * @param taskInstance - */ - private void removeTaskFromStandbyList(TaskInstance taskInstance){ - logger.info("remove 
task from stand by list: {}", taskInstance.getName()); - readyToSubmitTaskList.remove(taskInstance.getName()); - } - - /** - * has retry task in standby - * @return - */ - private Boolean hasRetryTaskInStandBy(){ - Set taskNameSet = this.readyToSubmitTaskList.keySet(); - for(String taskName : taskNameSet){ - TaskInstance task = this.readyToSubmitTaskList.get(taskName); - if(task.getState().typeIsFailure()){ - return true; - } - } - return false; - } - - /** - * submit and watch the tasks, until the work flow stop - */ - private void runProcess(){ - // submit start node - submitPostNode(null); - boolean sendTimeWarning = false; - while(!processInstance.IsProcessInstanceStop()){ - - // send warning email if process time out. - if( !sendTimeWarning && checkProcessTimeOut(processInstance) ){ - alertManager.sendProcessTimeoutAlert(processInstance, - processDao.findProcessDefineById(processInstance.getProcessDefinitionId())); - sendTimeWarning = true; - } - Set keys = activeTaskNode.keySet(); - for (MasterBaseTaskExecThread taskExecThread : keys) { - Future future = activeTaskNode.get(taskExecThread); - TaskInstance task = taskExecThread.getTaskInstance(); - - if(!future.isDone()){ - continue; - } - // node monitor thread complete - activeTaskNode.remove(taskExecThread); - if(task == null){ - this.taskFailedSubmit = true; - continue; - } - logger.info("task :{}, id:{} complete, state is {} ", - task.getName(), task.getId(), task.getState().toString()); - // node success , post node submit - if(task.getState() == ExecutionStatus.SUCCESS){ - completeTaskList.put(task.getName(), task); - submitPostNode(task.getName()); - continue; - } - // node fails, retry first, and then execute the failure process - if(task.getState().typeIsFailure()){ - if(task.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE){ - this.recoverToleranceFaultTaskList.add(task); - } - if(task.taskCanRetry()){ - addTaskToStandByList(task); - }else{ - // node failure, based on failure strategy - 
errorTaskList.put(task.getName(), task); - completeTaskList.put(task.getName(), task); - if(processInstance.getFailureStrategy() == FailureStrategy.END){ - kill(); - } - } - continue; - } - // other status stop/pause - completeTaskList.put(task.getName(), task); - } - // send alert - if(this.recoverToleranceFaultTaskList.size() > 0){ - alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList); - this.recoverToleranceFaultTaskList.clear(); - } - // updateProcessInstance completed task status - // failure priority is higher than pause - // if a task fails, other suspended tasks need to be reset kill - if(errorTaskList.size() > 0){ - for(String taskName : completeTaskList.keySet()){ - TaskInstance completeTask = completeTaskList.get(taskName); - if(completeTask.getState()== ExecutionStatus.PAUSE){ - completeTask.setState(ExecutionStatus.KILL); - completeTaskList.put(taskName, completeTask); - processDao.updateTaskInstance(completeTask); - } - } - } - if(canSubmitTaskToQueue()){ - submitStandByTask(); - } - try { - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - } catch (InterruptedException e) { - logger.error(e.getMessage(),e); - } - updateProcessInstanceState(); - } - - logger.info("process:{} end, state :{}", processInstance.getId(), processInstance.getState()); - } - - /** - * check process time out - * @param processInstance - * @return - */ - private boolean checkProcessTimeOut(ProcessInstance processInstance) { - if(processInstance.getTimeout() == 0 ){ - return false; - } - - Date now = new Date(); - long runningTime = DateUtils.diffMin(now, processInstance.getStartTime()); - - if(runningTime > processInstance.getTimeout()){ - return true; - } - return false; - } - - private boolean canSubmitTaskToQueue() { - return OSUtils.checkResource(conf, true); - } - - - /** - * close the ongoing tasks - */ - private void kill() { - - logger.info("kill called on process instance id: {}, num: {}", processInstance.getId(), - 
activeTaskNode.size()); - for (Map.Entry> entry : activeTaskNode.entrySet()) { - - MasterBaseTaskExecThread taskExecThread = entry.getKey(); - Future future = entry.getValue(); - - if (!future.isDone()) { - // record kill info - logger.info("kill process instance, id: {}, task: {}", processInstance.getId(), taskExecThread.getTaskInstance().getId()); - - // kill node - taskExecThread.kill(); - } - } - } - - /** - * whether the retry interval is timed out - * @param taskInstance - * @return - */ - private Boolean retryTaskIntervalOverTime(TaskInstance taskInstance){ - if(taskInstance.getState() != ExecutionStatus.FAILURE){ - return Boolean.TRUE; - } - if(taskInstance.getId() == 0 || - taskInstance.getMaxRetryTimes() ==0 || - taskInstance.getRetryInterval() == 0 ){ - return Boolean.TRUE; - } - Date now = new Date(); - long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime()); - // task retry does not over time, return false - if(taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT >= failedTimeInterval){ - return Boolean.FALSE; - } - return Boolean.TRUE; - } - - /** - * handling the list of tasks to be submitted - */ - private void submitStandByTask(){ - Set readySubmitTaskNames = readyToSubmitTaskList.keySet(); - for(String readySubmitTaskName : readySubmitTaskNames){ - TaskInstance task = readyToSubmitTaskList.get(readySubmitTaskName); - DependResult dependResult = getDependResultForTask(task); - if(DependResult.SUCCESS == dependResult){ - if(retryTaskIntervalOverTime(task)){ - submitTaskExec(task); - removeTaskFromStandbyList(task); - } - }else if(DependResult.FAILED == dependResult){ - // if the dependency fails, the current node is not submitted and the state changes to failure. 
- dependFailedTask.put(readySubmitTaskName, task); - removeTaskFromStandbyList(task); - logger.info("task {},id:{} depend result : {}",task.getName(), task.getId(), dependResult); - } - } - } - - private TaskInstance getRecoveryTaskInstance(String taskId){ - if(!StringUtils.isNotEmpty(taskId)){ - return null; - } - try { - Integer intId = Integer.valueOf(taskId); - TaskInstance task = processDao.findTaskInstanceById(intId); - if(task == null){ - logger.error("start node id cannot be found: {}", taskId); - }else { - return task; - } - }catch (Exception e){ - logger.error("get recovery task instance failed : " + e.getMessage(),e); - } - return null; - } - - /** - * get start task instance list - * @param cmdParam - * @return - */ - private List getStartTaskInstanceList( String cmdParam){ - - List instanceList = new ArrayList<>(); - Map paramMap = JSONUtils.toMap(cmdParam); - - if(paramMap != null && paramMap.containsKey(CMDPARAM_RECOVERY_START_NODE_STRING)){ - String[] idList = paramMap.get(CMDPARAM_RECOVERY_START_NODE_STRING).split(Constants.COMMA); - for(String nodeId : idList){ - TaskInstance task = getRecoveryTaskInstance(nodeId); - if(task != null){ - instanceList.add(task); - } - } - } - return instanceList; - } - - /** - * parse "StartNodeNameList" from cmd param - * @param cmdParam - * @return - */ - private List parseStartNodeName(String cmdParam){ - List startNodeNameList = new ArrayList<>(); - Map paramMap = JSONUtils.toMap(cmdParam); - if(paramMap == null){ - return startNodeNameList; - } - if(paramMap.containsKey(CMDPARAM_START_NODE_NAMES)){ - startNodeNameList = Arrays.asList(paramMap.get(CMDPARAM_START_NODE_NAMES).split(Constants.COMMA)); - } - return startNodeNameList; - } - - /** - * generate start node name list from parsing command param; - * if "StartNodeIdList" exists in command param, return StartNodeIdList - * @return - */ - private List getRecoveryNodeNameList(){ - List recoveryNodeNameList = new ArrayList<>(); - if(recoverNodeIdList.size() > 
0) { - for (TaskInstance task : recoverNodeIdList) { - recoveryNodeNameList.add(task.getName()); - } - } - return recoveryNodeNameList; - } - - /** - * generate flow dag - * @param processDefinitionJson - * @return - * @throws Exception - */ - public ProcessDag generateFlowDag(String processDefinitionJson, - List startNodeNameList, - List recoveryNodeNameList, - TaskDependType depNodeType)throws Exception{ - return DagHelper.generateFlowDag(processDefinitionJson, startNodeNameList, recoveryNodeNameList, depNodeType); - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterSchedulerThread.java b/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterSchedulerThread.java deleted file mode 100644 index e886c7a2da..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterSchedulerThread.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.master.runner; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.common.thread.ThreadUtils; -import cn.escheduler.common.utils.OSUtils; -import cn.escheduler.common.zk.AbstractZKClient; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.server.zk.ZKMasterClient; -import org.apache.commons.configuration.Configuration; -import org.apache.curator.framework.imps.CuratorFrameworkState; -import org.apache.curator.framework.recipes.locks.InterProcessMutex; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ThreadPoolExecutor; - -/** - * master scheduler thread - */ -public class MasterSchedulerThread implements Runnable { - - private static final Logger logger = LoggerFactory.getLogger(MasterSchedulerThread.class); - - private final ExecutorService masterExecService; - - /** - * escheduler database interface - */ - private final ProcessDao processDao; - - private final ZKMasterClient zkMasterClient ; - - private int masterExecThreadNum; - - private final Configuration conf; - - - public MasterSchedulerThread(ZKMasterClient zkClient, ProcessDao processDao, Configuration conf, int masterExecThreadNum){ - this.processDao = processDao; - this.zkMasterClient = zkClient; - this.conf = conf; - this.masterExecThreadNum = masterExecThreadNum; - this.masterExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Exec-Thread",masterExecThreadNum); - } - - - @Override - public void run() { - while (Stopper.isRunning()){ - - // process instance - ProcessInstance processInstance = null; - - InterProcessMutex mutex = null; - try { - - if(OSUtils.checkResource(conf, true)){ - if (zkMasterClient.getZkClient().getState() == CuratorFrameworkState.STARTED) { - - // create distributed lock with the root node path of the lock space as 
/escheduler/lock/failover/master - String znodeLock = zkMasterClient.getMasterLockPath(); - - mutex = new InterProcessMutex(zkMasterClient.getZkClient(), znodeLock); - mutex.acquire(); - - ThreadPoolExecutor poolExecutor = (ThreadPoolExecutor) masterExecService; - int activeCount = poolExecutor.getActiveCount(); - // make sure to scan and delete command table in one transaction - processInstance = processDao.scanCommand(logger, OSUtils.getHost(), this.masterExecThreadNum - activeCount); - if (processInstance != null) { - logger.info("start master exex thread , split DAG ..."); - masterExecService.execute(new MasterExecThread(processInstance)); - } - } - } - - // accessing the command table every SLEEP_TIME_MILLIS milliseconds - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - - }catch (Exception e){ - logger.error("master scheduler thread exception : " + e.getMessage(),e); - }finally{ - AbstractZKClient.releaseMutex(mutex); - } - } - } - - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterTaskExecThread.java b/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterTaskExecThread.java deleted file mode 100644 index 1713cda59f..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/master/runner/MasterTaskExecThread.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.master.runner; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.enums.TaskTimeoutStrategy; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.task.TaskTimeoutParameter; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.TaskInstance; -import com.alibaba.fastjson.JSONObject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Date; - -import static cn.escheduler.common.Constants.SCHEDULER_TASKS_KILL; - -/** - * master task exec thread - */ -public class MasterTaskExecThread extends MasterBaseTaskExecThread { - - private static final Logger logger = LoggerFactory.getLogger(MasterTaskExecThread.class); - - - public MasterTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){ - super(taskInstance, processInstance); - } - - /** - * get task instance - * @return - */ - @Override - public TaskInstance getTaskInstance(){ - return this.taskInstance; - } - - private Boolean alreadyKilled = false; - - @Override - public Boolean submitWaitComplete() { - Boolean result = false; - this.taskInstance = submit(); - if(!this.taskInstance.getState().typeIsFinished()) { - result = waitTaskQuit(); - } - taskInstance.setEndTime(new Date()); - processDao.updateTaskInstance(taskInstance); - logger.info("task :{} id:{}, process id:{}, exec thread completed ", - 
this.taskInstance.getName(),taskInstance.getId(), processInstance.getId() ); - return result; - } - - - public Boolean waitTaskQuit(){ - // query new state - taskInstance = processDao.findTaskInstanceById(taskInstance.getId()); - Boolean result = true; - // task time out - Boolean checkTimeout = false; - TaskTimeoutParameter taskTimeoutParameter = getTaskTimeoutParameter(); - if(taskTimeoutParameter.getEnable()){ - TaskTimeoutStrategy strategy = taskTimeoutParameter.getStrategy(); - if(strategy == TaskTimeoutStrategy.WARN || strategy == TaskTimeoutStrategy.WARNFAILED){ - checkTimeout = true; - } - } - - while (Stopper.isRunning()){ - try { - if(this.processInstance == null){ - logger.error("process instance not exists , master task exec thread exit"); - return result; - } - // task instance add queue , waiting worker to kill - if(this.cancel || this.processInstance.getState() == ExecutionStatus.READY_STOP){ - cancelTaskInstance(); - } - // task instance finished - if (taskInstance.getState().typeIsFinished()){ - break; - } - if(checkTimeout){ - long remainTime = getRemaintime(taskTimeoutParameter.getInterval()*60); - if (remainTime < 0) { - logger.warn("task id: {} execution time out",taskInstance.getId()); - // process define - ProcessDefinition processDefine = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); - // send warn mail - alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(),processDefine.getReceivers(),processDefine.getReceiversCc(),taskInstance.getId(),taskInstance.getName()); - checkTimeout = false; - } - } - // updateProcessInstance task instance - taskInstance = processDao.findTaskInstanceById(taskInstance.getId()); - processInstance = processDao.findProcessInstanceById(processInstance.getId()); - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - } catch (Exception e) { - logger.error("exception: "+ e.getMessage(),e); - logger.error("wait task quit failed, instance id:{}, task id:{}", - processInstance.getId(), 
taskInstance.getId()); - } - } - return result; - } - - - /** - * task instance add queue , waiting worker to kill - */ - private void cancelTaskInstance(){ - if(alreadyKilled || taskInstance.getHost() == null){ - return ; - } - alreadyKilled = true; - String queueValue = String.format("%s-%d", - taskInstance.getHost(), taskInstance.getId()); - taskQueue.sadd(SCHEDULER_TASKS_KILL, queueValue); - - logger.info("master add kill task :{} id:{} to kill queue", - taskInstance.getName(), taskInstance.getId() ); - } - - /** - * get task timeout parameter - * @return - */ - private TaskTimeoutParameter getTaskTimeoutParameter(){ - String taskJson = taskInstance.getTaskJson(); - TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); - return taskNode.getTaskTimeoutParameter(); - } - - - /** - * get remain time(s) - * - * @return - */ - private long getRemaintime(long timeoutSeconds) { - Date startTime = taskInstance.getStartTime(); - long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000; - long remainTime = timeoutSeconds - usedTime; - return remainTime; - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/master/runner/SubProcessTaskExecThread.java b/escheduler-server/src/main/java/cn/escheduler/server/master/runner/SubProcessTaskExecThread.java deleted file mode 100644 index efcd308cb2..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/master/runner/SubProcessTaskExecThread.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.master.runner; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.TaskInstance; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Date; - -/** - * subflow task exec thread - */ -public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { - - - private static final Logger logger = LoggerFactory.getLogger(SubProcessTaskExecThread.class); - - - private ProcessInstance subProcessInstance; - - public SubProcessTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){ - super(taskInstance, processInstance); - } - - @Override - public Boolean submitWaitComplete() { - - Boolean result = false; - try{ - // submit task instance - this.taskInstance = submit(); - - if(taskInstance == null){ - logger.error("sub work flow submit task instance to mysql and queue failed , please check and fix it"); - return result; - } - setTaskInstanceState(); - waitTaskQuit(); - subProcessInstance = processDao.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); - - // at the end of the subflow , the task state is changed to the subflow state - if(subProcessInstance != null){ - if(subProcessInstance.getState() == ExecutionStatus.STOP){ - this.taskInstance.setState(ExecutionStatus.KILL); - }else{ - this.taskInstance.setState(subProcessInstance.getState()); - result = true; - } - } - 
taskInstance.setEndTime(new Date()); - processDao.updateTaskInstance(taskInstance); - logger.info("subflow task :{} id:{}, process id:{}, exec thread completed ", - this.taskInstance.getName(),taskInstance.getId(), processInstance.getId() ); - result = true; - - }catch (Exception e){ - logger.error("exception: "+ e.getMessage(),e); - logger.error("wait task quit failed, instance id:{}, task id:{}", - processInstance.getId(), taskInstance.getId()); - } - return result; - } - - - /** - * set task instance state - * @return - */ - private Boolean setTaskInstanceState(){ - subProcessInstance = processDao.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); - if(subProcessInstance == null || taskInstance.getState().typeIsFinished()){ - return false; - } - - taskInstance.setState(ExecutionStatus.RUNNING_EXEUTION); - taskInstance.setStartTime(new Date()); - processDao.updateTaskInstance(taskInstance); - return true; - } - - /** - * updateProcessInstance parent state - */ - private void updateParentProcessState(){ - ProcessInstance parentProcessInstance = processDao.findProcessInstanceById(this.processInstance.getId()); - - if(parentProcessInstance == null){ - logger.error("parent work flow instance is null , please check it! work flow id {}", processInstance.getId()); - return; - } - this.processInstance.setState(parentProcessInstance.getState()); - } - - /** - * wait task quit - * @throws InterruptedException - */ - private void waitTaskQuit() throws InterruptedException { - - logger.info("wait sub work flow: {} complete", this.taskInstance.getName()); - - if (taskInstance.getState().typeIsFinished()) { - logger.info("sub work flow task {} already complete. 
task state:{}, parent work flow instance state:{}", - this.taskInstance.getName(), - this.taskInstance.getState().toString(), - this.processInstance.getState().toString()); - return; - } - while (Stopper.isRunning()) { - // waiting for subflow process instance establishment - if (subProcessInstance == null) { - - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - - if(!setTaskInstanceState()){ - continue; - } - } - subProcessInstance = processDao.findProcessInstanceById(subProcessInstance.getId()); - updateParentProcessState(); - if (subProcessInstance.getState().typeIsFinished()){ - break; - } - - if(this.processInstance.getState() == ExecutionStatus.READY_PAUSE){ - // parent process "ready to pause" , child process "pause" - pauseSubProcess(); - }else if(this.cancel || this.processInstance.getState() == ExecutionStatus.READY_STOP){ - // parent Process "Ready to Cancel" , subflow "Cancel" - stopSubProcess(); - } - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - } - } - - /** - * stop subflow - */ - private void stopSubProcess() { - if(subProcessInstance.getState() == ExecutionStatus.STOP || - subProcessInstance.getState() == ExecutionStatus.READY_STOP){ - return; - } - subProcessInstance.setState(ExecutionStatus.READY_STOP); - processDao.updateProcessInstance(subProcessInstance); - } - - /** - * pause subflow - */ - private void pauseSubProcess() { - if(subProcessInstance.getState() == ExecutionStatus.PAUSE || - subProcessInstance.getState() == ExecutionStatus.READY_PAUSE){ - return; - } - subProcessInstance.setState(ExecutionStatus.READY_PAUSE); - processDao.updateProcessInstance(subProcessInstance); - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/quartz/DruidConnectionProvider.java b/escheduler-server/src/main/java/cn/escheduler/server/quartz/DruidConnectionProvider.java deleted file mode 100644 index b58d87ce9d..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/quartz/DruidConnectionProvider.java +++ /dev/null @@ -1,203 +0,0 
@@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.quartz; - -import com.alibaba.druid.pool.DruidDataSource; -import org.quartz.SchedulerException; -import java.sql.Connection; -import java.sql.SQLException; -import org.quartz.utils.ConnectionProvider; - -/** - * druid connection provider - */ -public class DruidConnectionProvider implements ConnectionProvider { - - /** - * JDBC driver - */ - public String driver; - - /** - * JDBC URL - */ - public String URL; - - /** - * Database user name - */ - public String user; - - /** - * Database password - */ - public String password; - - /** - * Maximum number of database connections - */ - public int maxConnections; - - /** - * The query that validates the database connection - */ - public String validationQuery; - - /** - * Whether the database sql query to validate connections should be executed every time - * a connection is retrieved from the pool to ensure that it is still valid. If false, - * then validation will occur on check-in. Default is false. - */ - private boolean validateOnCheckout; - - /** - * The number of seconds between tests of idle connections - only enabled - * if the validation query property is set. 
Default is 50 seconds. - */ - private int idleConnectionValidationSeconds; - - /** - * The maximum number of prepared statements that will be cached per connection in the pool. - * Depending upon your JDBC Driver this may significantly help performance, or may slightly - * hinder performance. - * Default is 120, as Quartz uses over 100 unique statements. 0 disables the feature. - */ - public String maxCachedStatementsPerConnection; - - /** - * Discard connections after they have been idle this many seconds. 0 disables the feature. Default is 0. - */ - private String discardIdleConnectionsSeconds; - - /** - * Default maximum number of database connections in the pool. - */ - public static final int DEFAULT_DB_MAX_CONNECTIONS = 10; - - /** - * The maximum number of prepared statements that will be cached per connection in the pool. - */ - public static final int DEFAULT_DB_MAX_CACHED_STATEMENTS_PER_CONNECTION = 120; - - /** - * Druid connection pool - */ - private DruidDataSource datasource; - - public Connection getConnection() throws SQLException { - return datasource.getConnection(); - } - public void shutdown() throws SQLException { - datasource.close(); - } - public void initialize() throws SQLException{ - if (this.URL == null) { - throw new SQLException("DBPool could not be created: DB URL cannot be null"); - } - if (this.driver == null) { - throw new SQLException("DBPool driver could not be created: DB driver class name cannot be null!"); - } - if (this.maxConnections < 0) { - throw new SQLException("DBPool maxConnectins could not be created: Max connections must be greater than zero!"); - } - datasource = new DruidDataSource(); - try{ - datasource.setDriverClassName(this.driver); - } catch (Exception e) { - try { - throw new SchedulerException("Problem setting driver class name on datasource: " + e.getMessage(), e); - } catch (SchedulerException e1) { - } - } - datasource.setUrl(this.URL); - datasource.setUsername(this.user); - 
datasource.setPassword(this.password); - datasource.setMaxActive(this.maxConnections); - datasource.setMinIdle(1); - datasource.setMaxWait(0); - datasource.setMaxPoolPreparedStatementPerConnectionSize(DEFAULT_DB_MAX_CONNECTIONS); - if (this.validationQuery != null) { - datasource.setValidationQuery(this.validationQuery); - if(!this.validateOnCheckout) - datasource.setTestOnReturn(true); - else - datasource.setTestOnBorrow(true); - datasource.setValidationQueryTimeout(this.idleConnectionValidationSeconds); - } - } - - public String getDriver() { - return driver; - } - public void setDriver(String driver) { - this.driver = driver; - } - public String getURL() { - return URL; - } - public void setURL(String URL) { - this.URL = URL; - } - public String getUser() { - return user; - } - public void setUser(String user) { - this.user = user; - } - public String getPassword() { - return password; - } - public void setPassword(String password) { - this.password = password; - } - public int getMaxConnections() { - return maxConnections; - } - public void setMaxConnections(int maxConnections) { - this.maxConnections = maxConnections; - } - public String getValidationQuery() { - return validationQuery; - } - public void setValidationQuery(String validationQuery) { - this.validationQuery = validationQuery; - } - public boolean isValidateOnCheckout() { - return validateOnCheckout; - } - public void setValidateOnCheckout(boolean validateOnCheckout) { - this.validateOnCheckout = validateOnCheckout; - } - public int getIdleConnectionValidationSeconds() { - return idleConnectionValidationSeconds; - } - public void setIdleConnectionValidationSeconds(int idleConnectionValidationSeconds) { - this.idleConnectionValidationSeconds = idleConnectionValidationSeconds; - } - public DruidDataSource getDatasource() { - return datasource; - } - public void setDatasource(DruidDataSource datasource) { - this.datasource = datasource; - } - public String getDiscardIdleConnectionsSeconds() { - return 
discardIdleConnectionsSeconds; - } - public void setDiscardIdleConnectionsSeconds(String discardIdleConnectionsSeconds) { - this.discardIdleConnectionsSeconds = discardIdleConnectionsSeconds; - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/quartz/ProcessScheduleJob.java b/escheduler-server/src/main/java/cn/escheduler/server/quartz/ProcessScheduleJob.java deleted file mode 100644 index 49c0d9bd32..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/quartz/ProcessScheduleJob.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.quartz; - - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.common.enums.ReleaseState; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.Command; -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.entity.Schedule; -import org.quartz.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.util.Assert; - -import java.util.Date; - -import static cn.escheduler.server.quartz.QuartzExecutors.buildJobGroupName; -import static cn.escheduler.server.quartz.QuartzExecutors.buildJobName; - -/** - * process schedule job - *

- * {@link Job} - *

- */ -public class ProcessScheduleJob implements Job { - - private static final Logger logger = LoggerFactory.getLogger(ProcessScheduleJob.class); - - /** - * {@link ProcessDao} - */ - private static ProcessDao processDao; - - - /** - * init - */ - public static void init(ProcessDao processDao) { - ProcessScheduleJob.processDao = processDao; - } - - /** - *

- * Called by the {@link Scheduler} when a {@link Trigger} - * fires that is associated with the Job. - *

- * - *

- * The implementation may wish to set a - * {@link JobExecutionContext#setResult(Object) result} object on the - * {@link JobExecutionContext} before this method exits. The result itself - * is meaningless to Quartz, but may be informative to - * {@link JobListener}s or - * {@link TriggerListener}s that are watching the job's - * execution. - *

- * - * @throws JobExecutionException if there is an exception while executing the job. - */ - @Override - public void execute(JobExecutionContext context) throws JobExecutionException { - - //TODO... - Assert.notNull(processDao, "please call init() method first"); - - JobDataMap dataMap = context.getJobDetail().getJobDataMap(); - - int projectId = dataMap.getInt(Constants.PROJECT_ID); - int scheduleId = dataMap.getInt(Constants.SCHEDULE_ID); - - - Date scheduledFireTime = context.getScheduledFireTime(); - - - Date fireTime = context.getFireTime(); - - logger.info("scheduled fire time :{}, fire time :{}, process id :{}", scheduledFireTime, fireTime, scheduleId); - - // query schedule - Schedule schedule = processDao.querySchedule(scheduleId); - if (schedule == null) { - logger.warn("process schedule does not exist in db,delete schedule job in quartz, projectId:{}, scheduleId:{}", projectId, scheduleId); - deleteJob(projectId, scheduleId); - return; - } - - - ProcessDefinition processDefinition = processDao.findProcessDefineById(schedule.getProcessDefinitionId()); - // release state : online/offline - ReleaseState releaseState = processDefinition.getReleaseState(); - if (processDefinition == null || releaseState == ReleaseState.OFFLINE) { - logger.warn("process definition does not exist in db or offline,need not to create command, projectId:{}, processId:{}", projectId, scheduleId); - return; - } - - Command command = new Command(); - command.setCommandType(CommandType.SCHEDULER); - command.setExecutorId(schedule.getUserId()); - command.setFailureStrategy(schedule.getFailureStrategy()); - command.setProcessDefinitionId(schedule.getProcessDefinitionId()); - command.setScheduleTime(scheduledFireTime); - command.setStartTime(fireTime); - command.setWarningGroupId(schedule.getWarningGroupId()); - command.setWorkerGroupId(schedule.getWorkerGroupId()); - command.setWarningType(schedule.getWarningType()); - 
command.setProcessInstancePriority(schedule.getProcessInstancePriority()); - - processDao.createCommand(command); - } - - - /** - * delete job - */ - private void deleteJob(int projectId, int scheduleId) { - String jobName = buildJobName(scheduleId); - String jobGroupName = buildJobGroupName(projectId); - QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName); - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/quartz/QuartzExecutors.java b/escheduler-server/src/main/java/cn/escheduler/server/quartz/QuartzExecutors.java deleted file mode 100644 index 5c610e759d..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/quartz/QuartzExecutors.java +++ /dev/null @@ -1,311 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.quartz; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.dao.entity.Schedule; -import org.apache.commons.lang.StringUtils; -import org.quartz.*; -import org.quartz.impl.StdSchedulerFactory; -import org.quartz.impl.matchers.GroupMatcher; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; -import java.util.Calendar; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; - -import static org.quartz.CronScheduleBuilder.cronSchedule; -import static org.quartz.JobBuilder.newJob; -import static org.quartz.TriggerBuilder.newTrigger; - -/** - * single Quartz executors instance - */ -public class QuartzExecutors { - - private static final Logger logger = LoggerFactory.getLogger(QuartzExecutors.class); - - private final ReadWriteLock lock = new ReentrantReadWriteLock(); - - - /** - *

- * A Scheduler maintains a registry of {@link org.quartz.JobDetail}s - * and {@link Trigger}s. Once registered, the Scheduler - * is responsible for executing Job s when their associated - * Trigger s fire (when their scheduled time arrives). - *

- * {@link Scheduler} - */ - private static Scheduler scheduler; - - private static volatile QuartzExecutors INSTANCE = null; - - private QuartzExecutors() {} - - /** - * thread safe and performance promote - * @return - */ - public static QuartzExecutors getInstance() { - if (INSTANCE == null) { - synchronized (QuartzExecutors.class) { - // when more than two threads run into the first null check same time, to avoid instanced more than one time, it needs to be checked again. - if (INSTANCE == null) { - INSTANCE = new QuartzExecutors(); - //finish QuartzExecutors init - INSTANCE.init(); - } - } - } - return INSTANCE; - } - - - /** - * init - * - *

- * Returns a client-usable handle to a Scheduler. - *

- */ - private void init() { - try { - SchedulerFactory schedulerFactory = new StdSchedulerFactory(Constants.QUARTZ_PROPERTIES_PATH); - scheduler = schedulerFactory.getScheduler(); - - } catch (SchedulerException e) { - logger.error(e.getMessage(),e); - System.exit(1); - } - - } - - /** - * Whether the scheduler has been started. - * - *

- * Note: This only reflects whether {@link #start()} has ever - * been called on this Scheduler, so it will return true even - * if the Scheduler is currently in standby mode or has been - * since shutdown. - *

- * - * @see Scheduler#start() - */ - public void start() throws SchedulerException { - if (!scheduler.isStarted()){ - scheduler.start(); - logger.info("Quartz service started" ); - } - } - - /** - * stop all scheduled tasks - * - * Halts the Scheduler's firing of {@link Trigger}s, - * and cleans up all resources associated with the Scheduler. Equivalent to - * shutdown(false). - * - *

- * The scheduler cannot be re-started. - *

- * - */ - public void shutdown() throws SchedulerException { - if (!scheduler.isShutdown()) { - // don't wait for the task to complete - scheduler.shutdown(); - logger.info("Quartz service stopped, and halt all tasks"); - } - } - - - /** - * add task trigger , if this task already exists, return this task with updated trigger - * - * @param clazz job class name - * @param jobName job name - * @param jobGroupName job group name - * @param startDate job start date - * @param endDate job end date - * @param cronExpression cron expression - * @param jobDataMap job parameters data map - * @return - */ - public void addJob(Class clazz,String jobName,String jobGroupName,Date startDate, Date endDate, - String cronExpression, - Map jobDataMap) { - lock.writeLock().lock(); - try { - - JobKey jobKey = new JobKey(jobName, jobGroupName); - JobDetail jobDetail; - //add a task (if this task already exists, return this task directly) - if (scheduler.checkExists(jobKey)) { - - jobDetail = scheduler.getJobDetail(jobKey); - if (jobDataMap != null) { - jobDetail.getJobDataMap().putAll(jobDataMap); - } - } else { - jobDetail = newJob(clazz).withIdentity(jobKey).build(); - - if (jobDataMap != null) { - jobDetail.getJobDataMap().putAll(jobDataMap); - } - - scheduler.addJob(jobDetail, false, true); - - logger.info("Add job, job name: {}, group name: {}", - jobName, jobGroupName); - } - - TriggerKey triggerKey = new TriggerKey(jobName, jobGroupName); - /** - * Instructs the {@link Scheduler} that upon a mis-fire - * situation, the {@link CronTrigger} wants to have it's - * next-fire-time updated to the next time in the schedule after the - * current time (taking into account any associated {@link Calendar}, - * but it does not want to be fired now. 
- */ - CronTrigger cronTrigger = newTrigger().withIdentity(triggerKey).startAt(startDate).endAt(endDate) - .withSchedule(cronSchedule(cronExpression).withMisfireHandlingInstructionDoNothing()) - .forJob(jobDetail).build(); - - if (scheduler.checkExists(triggerKey)) { - // updateProcessInstance scheduler trigger when scheduler cycle changes - CronTrigger oldCronTrigger = (CronTrigger) scheduler.getTrigger(triggerKey); - String oldCronExpression = oldCronTrigger.getCronExpression(); - - if (!StringUtils.equalsIgnoreCase(cronExpression,oldCronExpression)) { - // reschedule job trigger - scheduler.rescheduleJob(triggerKey, cronTrigger); - logger.info("reschedule job trigger, triggerName: {}, triggerGroupName: {}, cronExpression: {}, startDate: {}, endDate: {}", - jobName, jobGroupName, cronExpression, startDate, endDate); - } - } else { - scheduler.scheduleJob(cronTrigger); - logger.info("schedule job trigger, triggerName: {}, triggerGroupName: {}, cronExpression: {}, startDate: {}, endDate: {}", - jobName, jobGroupName, cronExpression, startDate, endDate); - } - - } catch (Exception e) { - logger.error("add job failed", e); - throw new RuntimeException("add job failed:"+e.getMessage()); - } finally { - lock.writeLock().unlock(); - } - } - - - /** - * delete job - * - * @param jobName - * @param jobGroupName - * @return true if the Job was found and deleted. - */ - public boolean deleteJob(String jobName, String jobGroupName) { - lock.writeLock().lock(); - try { - JobKey jobKey = new JobKey(jobName,jobGroupName); - if(scheduler.checkExists(jobKey)){ - logger.info("try to delete job, job name: {}, job group name: {},", jobName, jobGroupName); - return scheduler.deleteJob(jobKey); - }else { - return true; - } - - } catch (SchedulerException e) { - logger.error(String.format("delete job : %s failed",jobName), e); - } finally { - lock.writeLock().unlock(); - } - return false; - } - - /** - * delete all jobs in job group - *

- * Note that while this bulk operation is likely more efficient than - * invoking deleteJob(JobKey jobKey) several - * times, it may have the adverse affect of holding data locks for a - * single long duration of time (rather than lots of small durations - * of time). - *

- * - * @param jobGroupName - * - * @return true if all of the Jobs were found and deleted, false if - * one or more were not deleted. - */ - public boolean deleteAllJobs(String jobGroupName) { - lock.writeLock().lock(); - try { - logger.info("try to delete all jobs in job group: {}", jobGroupName); - List jobKeys = new ArrayList<>(); - jobKeys.addAll(scheduler.getJobKeys(GroupMatcher.groupEndsWith(jobGroupName))); - - return scheduler.deleteJobs(jobKeys); - } catch (SchedulerException e) { - logger.error(String.format("delete all jobs in job group: %s failed",jobGroupName), e); - } finally { - lock.writeLock().unlock(); - } - return false; - } - - /** - * build job name - */ - public static String buildJobName(int processId) { - StringBuilder sb = new StringBuilder(30); - sb.append(Constants.QUARTZ_JOB_PRIFIX).append(Constants.UNDERLINE).append(processId); - return sb.toString(); - } - - /** - * build job group name - */ - public static String buildJobGroupName(int projectId) { - StringBuilder sb = new StringBuilder(30); - sb.append(Constants.QUARTZ_JOB_GROUP_PRIFIX).append(Constants.UNDERLINE).append(projectId); - return sb.toString(); - } - - /** - * add params to map - * - * @param projectId - * @param scheduleId - * @param schedule - * @return - */ - public static Map buildDataMap(int projectId, int scheduleId, Schedule schedule) { - Map dataMap = new HashMap<>(3); - dataMap.put(Constants.PROJECT_ID, projectId); - dataMap.put(Constants.SCHEDULE_ID, scheduleId); - dataMap.put(Constants.SCHEDULE, JSONUtils.toJson(schedule)); - - return dataMap; - } - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/rpc/LogClient.java b/escheduler-server/src/main/java/cn/escheduler/server/rpc/LogClient.java deleted file mode 100644 index 745bd323bf..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/rpc/LogClient.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor 
license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.rpc; - -import cn.escheduler.rpc.*; -import io.grpc.ManagedChannel; -import io.grpc.ManagedChannelBuilder; -import io.grpc.StatusRuntimeException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.concurrent.TimeUnit; - -/** - * log client - */ -public class LogClient { - - private static final Logger logger = LoggerFactory.getLogger(LogClient.class); - - private final ManagedChannel channel; - private final LogViewServiceGrpc.LogViewServiceBlockingStub blockingStub; - - /** Construct client connecting to HelloWorld server at {@code host:port}. */ - public LogClient(String host, int port) { - this(ManagedChannelBuilder.forAddress(host, port) - // Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid - // needing certificates. - .usePlaintext(true)); - } - - /** Construct client for accessing RouteGuide server using the existing channel. 
*/ - LogClient(ManagedChannelBuilder channelBuilder) { - /** - * set max message read size - */ - channelBuilder.maxInboundMessageSize(Integer.MAX_VALUE); - channel = channelBuilder.build(); - blockingStub = LogViewServiceGrpc.newBlockingStub(channel); - } - - public void shutdown() throws InterruptedException { - channel.shutdown().awaitTermination(5, TimeUnit.SECONDS); - } - - /** - * roll view log - * @param path - * @param skipLineNum - * @param limit - * @return - */ - public String rollViewLog(String path,int skipLineNum,int limit) { - logger.info("roll view log , path : {},skipLineNum : {} ,limit :{}", path, skipLineNum, limit); - LogParameter pathParameter = LogParameter - .newBuilder() - .setPath(path) - .setSkipLineNum(skipLineNum) - .setLimit(limit) - .build(); - RetStrInfo retStrInfo; - try { - retStrInfo = blockingStub.rollViewLog(pathParameter); - return retStrInfo.getMsg(); - } catch (StatusRuntimeException e) { - logger.error("roll view log failed : " + e.getMessage(), e); - return null; - } - } - - /** - * view all log - * @param path - * @return - */ - public String viewLog(String path) { - logger.info("view log path : {}",path); - - PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); - RetStrInfo retStrInfo; - try { - retStrInfo = blockingStub.viewLog(pathParameter); - return retStrInfo.getMsg(); - } catch (StatusRuntimeException e) { - logger.error("view log failed : " + e.getMessage(), e); - return null; - } - } - - /** - * get log bytes - * @param path - * @return - */ - public byte[] getLogBytes(String path) { - logger.info("get log bytes {}",path); - - PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); - RetByteInfo retByteInfo; - try { - retByteInfo = blockingStub.getLogBytes(pathParameter); - return retByteInfo.getData().toByteArray(); - } catch (StatusRuntimeException e) { - logger.error("get log bytes failed : " + e.getMessage(), e); - return null; - } - } -} \ No newline at end of 
file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/rpc/LoggerServer.java b/escheduler-server/src/main/java/cn/escheduler/server/rpc/LoggerServer.java deleted file mode 100644 index 1dde113545..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/rpc/LoggerServer.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.rpc; - -import cn.escheduler.common.Constants; -import cn.escheduler.rpc.*; -import com.google.protobuf.ByteString; -import io.grpc.Server; -import io.grpc.ServerBuilder; -import io.grpc.stub.StreamObserver; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * logger server - */ -public class LoggerServer { - - private static final Logger logger = LoggerFactory.getLogger(LoggerServer.class); - - /** - * server - */ - private Server server; - - public void start() throws IOException { - /* The port on which the server should run */ - int port = Constants.RPC_PORT; - server = ServerBuilder.forPort(port) - .addService(new LogViewServiceGrpcImpl()) - .build() - .start(); - logger.info("server started, listening on port : {}" , port); - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - // Use stderr here since the logger may have been reset by its JVM shutdown hook. - logger.info("shutting down gRPC server since JVM is shutting down"); - LoggerServer.this.stop(); - logger.info("server shut down"); - } - }); - } - - private void stop() { - if (server != null) { - server.shutdown(); - } - } - - /** - * await termination on the main thread since the grpc library uses daemon threads. - */ - private void blockUntilShutdown() throws InterruptedException { - if (server != null) { - server.awaitTermination(); - } - } - - /** - * main launches the server from the command line. 
- */ - public static void main(String[] args) throws IOException, InterruptedException { - final LoggerServer server = new LoggerServer(); - server.start(); - server.blockUntilShutdown(); - } - - - static class LogViewServiceGrpcImpl extends LogViewServiceGrpc.LogViewServiceImplBase { - @Override - public void rollViewLog(LogParameter request, StreamObserver responseObserver) { - - logger.info("log parameter path : {} ,skip line : {}, limit : {}", - request.getPath(), - request.getSkipLineNum(), - request.getLimit()); - List list = readFile(request.getPath(), request.getSkipLineNum(), request.getLimit()); - StringBuilder sb = new StringBuilder(); - boolean errorLineFlag = false; - for (String line : list){ - sb.append(line + "\r\n"); - } - RetStrInfo retInfoBuild = RetStrInfo.newBuilder().setMsg(sb.toString()).build(); - responseObserver.onNext(retInfoBuild); - responseObserver.onCompleted(); - } - - @Override - public void viewLog(PathParameter request, StreamObserver responseObserver) { - logger.info("task path is : {} " , request.getPath()); - RetStrInfo retInfoBuild = RetStrInfo.newBuilder().setMsg(readFile(request.getPath())).build(); - responseObserver.onNext(retInfoBuild); - responseObserver.onCompleted(); - } - - @Override - public void getLogBytes(PathParameter request, StreamObserver responseObserver) { - try { - ByteString bytes = ByteString.copyFrom(getFileBytes(request.getPath())); - RetByteInfo.Builder builder = RetByteInfo.newBuilder(); - builder.setData(bytes); - responseObserver.onNext(builder.build()); - responseObserver.onCompleted(); - }catch (Exception e){ - logger.error("get log bytes failed",e); - } - } - } - - /** - * get files bytes - * @param path - * @return - * @throws Exception - */ - private static byte[] getFileBytes(String path){ - InputStream in = null; - ByteArrayOutputStream bos = null; - try { - in = new FileInputStream(path); - bos = new ByteArrayOutputStream(); - byte[] buf = new byte[1024]; - int len = 0; - while ((len = 
in.read(buf)) != -1) { - bos.write(buf, 0, len); - } - return bos.toByteArray(); - }catch (IOException e){ - logger.error("get file bytes error",e); - }finally { - if (bos != null){ - try { - bos.close(); - } catch (IOException e) { - e.printStackTrace(); - } - } - if (in != null){ - try { - in.close(); - } catch (IOException e) { - e.printStackTrace(); - } - } - } - return null; - } - - /** - * read file content - * @param path - * @param skipLine - * @param limit - * @return - */ - private static List readFile(String path,int skipLine,int limit){ - try (Stream stream = Files.lines(Paths.get(path))) { - return stream.skip(skipLine).limit(limit).collect(Collectors.toList()); - } catch (IOException e) { - logger.error("read file failed",e); - } - return null; - } - - /** - * read file content - * @param path - * @return - * @throws Exception - */ - private static String readFile(String path){ - BufferedReader br = null; - String line = null; - StringBuilder sb = new StringBuilder(); - try { - br = new BufferedReader(new InputStreamReader(new FileInputStream(path))); - boolean errorLineFlag = false; - while ((line = br.readLine()) != null){ - sb.append(line + "\r\n"); - } - - return sb.toString(); - }catch (IOException e){ - logger.error("read file failed",e); - }finally { - try { - if (br != null){ - br.close(); - } - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - } - return null; - } - -} \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/utils/AlertManager.java b/escheduler-server/src/main/java/cn/escheduler/server/utils/AlertManager.java deleted file mode 100644 index d0de7a30ca..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/utils/AlertManager.java +++ /dev/null @@ -1,240 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.utils; - - -import cn.escheduler.common.enums.AlertType; -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.common.enums.ShowType; -import cn.escheduler.common.enums.WarningType; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.dao.AlertDao; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.entity.Alert; -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.TaskInstance; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Date; -import java.util.LinkedHashMap; -import java.util.List; - -/** - * alert manager - */ -public class AlertManager { - - private static final Logger logger = LoggerFactory.getLogger(AlertManager.class); - - private AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); - - - /** - * command type convert chinese - * @param commandType - * @return - */ - private String getCommandCnName(CommandType commandType) { - switch (commandType) { - case RECOVER_TOLERANCE_FAULT_PROCESS: - return "recover tolerance fault process"; - case RECOVER_SUSPENDED_PROCESS: - return "recover suspended process"; 
- case START_CURRENT_TASK_PROCESS: - return "start current task process"; - case START_FAILURE_TASK_PROCESS: - return "start failure task process"; - case START_PROCESS: - return "start process"; - case REPEAT_RUNNING: - return "repeat running"; - case SCHEDULER: - return "scheduler"; - case COMPLEMENT_DATA: - return "complement data"; - case PAUSE: - return "pause"; - case STOP: - return "stop"; - default: - return "unknown type"; - } - } - - /** - * process instance format - */ - private static final String PROCESS_INSTANCE_FORMAT = - "\"Id:%d\"," + - "\"Name:%s\"," + - "\"Job type: %s\"," + - "\"State: %s\"," + - "\"Recovery:%s\"," + - "\"Run time: %d\"," + - "\"Start time: %s\"," + - "\"End time: %s\"," + - "\"Host: %s\"" ; - - /** - * get process instance content - * @param processInstance - * @return - */ - public String getContentProcessInstance(ProcessInstance processInstance, - List taskInstances){ - - String res = ""; - if(processInstance.getState().typeIsSuccess()){ - res = String.format(PROCESS_INSTANCE_FORMAT, - processInstance.getId(), - processInstance.getName(), - getCommandCnName(processInstance.getCommandType()), - processInstance.getState().toString(), - processInstance.getRecovery().toString(), - processInstance.getRunTimes(), - DateUtils.dateToString(processInstance.getStartTime()), - DateUtils.dateToString(processInstance.getEndTime()), - processInstance.getHost() - - ); - res = "[" + res + "]"; - }else if(processInstance.getState().typeIsFailure()){ - - List failedTaskList = new ArrayList<>(); - - for(TaskInstance task : taskInstances){ - if(task.getState().typeIsSuccess()){ - continue; - } - LinkedHashMap failedTaskMap = new LinkedHashMap(); - failedTaskMap.put("task id", String.valueOf(task.getId())); - failedTaskMap.put("task name", task.getName()); - failedTaskMap.put("task type", task.getTaskType()); - failedTaskMap.put("task state", task.getState().toString()); - failedTaskMap.put("task start time", 
DateUtils.dateToString(task.getStartTime())); - failedTaskMap.put("task end time", DateUtils.dateToString(task.getEndTime())); - failedTaskMap.put("host", task.getHost()); - failedTaskMap.put("log path", task.getLogPath()); - failedTaskList.add(failedTaskMap); - } - res = JSONUtils.toJson(failedTaskList); - } - - return res; - } - - /** - * getting worker fault tolerant content - * @param processInstance - * @param toleranceTaskList - * @return - */ - private String getWorkerToleranceContent(ProcessInstance processInstance, List toleranceTaskList){ - - List> toleranceTaskInstanceList = new ArrayList<>(); - - for(TaskInstance taskInstance: toleranceTaskList){ - LinkedHashMap toleranceWorkerContentMap = new LinkedHashMap(); - toleranceWorkerContentMap.put("process name", processInstance.getName()); - toleranceWorkerContentMap.put("task name", taskInstance.getName()); - toleranceWorkerContentMap.put("host", taskInstance.getHost()); - toleranceWorkerContentMap.put("task retry times", String.valueOf(taskInstance.getRetryTimes())); - toleranceTaskInstanceList.add(toleranceWorkerContentMap); - } - return JSONUtils.toJson(toleranceTaskInstanceList); - } - - /** - * send worker alert fault tolerance - * @param processInstance - * @param toleranceTaskList - */ - public void sendAlertWorkerToleranceFault(ProcessInstance processInstance, List toleranceTaskList){ - Alert alert = new Alert(); - alert.setTitle("worker fault tolerance"); - alert.setShowType(ShowType.TABLE); - String content = getWorkerToleranceContent(processInstance, toleranceTaskList); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setCreateTime(new Date()); - alert.setAlertGroupId(processInstance.getWarningGroupId() == null ? 
1:processInstance.getWarningGroupId()); - alert.setReceivers(processInstance.getProcessDefinition().getReceivers()); - alert.setReceiversCc(processInstance.getProcessDefinition().getReceiversCc()); - - alertDao.addAlert(alert); - logger.info("add alert to db , alert : {}", alert.toString()); - - } - - /** - * send process instance alert - * @param processInstance - */ - public void sendAlertProcessInstance(ProcessInstance processInstance, - List taskInstances){ - - boolean sendWarnning = false; - WarningType warningType = processInstance.getWarningType(); - switch (warningType){ - case ALL: - if(processInstance.getState().typeIsFinished()){ - sendWarnning = true; - } - break; - case SUCCESS: - if(processInstance.getState().typeIsSuccess()){ - sendWarnning = true; - } - break; - case FAILURE: - if(processInstance.getState().typeIsFailure()){ - sendWarnning = true; - } - break; - default: - } - if(!sendWarnning){ - return; - } - Alert alert = new Alert(); - - - String cmdName = getCommandCnName(processInstance.getCommandType()); - String success = processInstance.getState().typeIsSuccess() ? "success" :"failed"; - alert.setTitle(cmdName + success); - ShowType showType = processInstance.getState().typeIsSuccess() ? 
ShowType.TEXT : ShowType.TABLE; - alert.setShowType(showType); - String content = getContentProcessInstance(processInstance, taskInstances); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(processInstance.getWarningGroupId()); - alert.setCreateTime(new Date()); - alert.setReceivers(processInstance.getProcessDefinition().getReceivers()); - alert.setReceiversCc(processInstance.getProcessDefinition().getReceiversCc()); - - alertDao.addAlert(alert); - logger.info("add alert to db , alert: {}", alert.toString()); - } - - public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProcessDefinition processDefinition) { - alertDao.sendProcessTimeoutAlert(processInstance, processDefinition); - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/utils/FlinkArgsUtils.java b/escheduler-server/src/main/java/cn/escheduler/server/utils/FlinkArgsUtils.java deleted file mode 100644 index 308103073d..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/utils/FlinkArgsUtils.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.utils; - - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ProgramType; -import cn.escheduler.common.task.flink.FlinkParameters; -import org.apache.commons.lang.StringUtils; - -import java.util.ArrayList; -import java.util.List; - - -/** - * spark args utils - */ -public class FlinkArgsUtils { - - /** - * build args - * @param param - * @return - */ - public static List buildArgs(FlinkParameters param) { - List args = new ArrayList<>(); - - args.add(Constants.FLINK_RUN_MODE); //-m - - args.add(Constants.FLINK_YARN_CLUSTER); //yarn-cluster - - if (param.getSlot() != 0) { - args.add(Constants.FLINK_YARN_SLOT); - args.add(String.format("%d", param.getSlot())); //-ys - } - - if (StringUtils.isNotEmpty(param.getAppName())) { //-ynm - args.add(Constants.FLINK_APP_NAME); - args.add(param.getAppName()); - } - - if (param.getTaskManager() != 0) { //-yn - args.add(Constants.FLINK_TASK_MANAGE); - args.add(String.format("%d", param.getTaskManager())); - } - - if (StringUtils.isNotEmpty(param.getJobManagerMemory())) { - args.add(Constants.FLINK_JOB_MANAGE_MEM); - args.add(param.getJobManagerMemory()); //-yjm - } - - if (StringUtils.isNotEmpty(param.getTaskManagerMemory())) { // -ytm - args.add(Constants.FLINK_TASK_MANAGE_MEM); - args.add(param.getTaskManagerMemory()); - } - args.add(Constants.FLINK_detach); //-d - - - if(param.getProgramType() !=null ){ - if(param.getProgramType()!=ProgramType.PYTHON){ - if (StringUtils.isNotEmpty(param.getMainClass())) { - args.add(Constants.FLINK_MAIN_CLASS); //-c - args.add(param.getMainClass()); //main class - } - } - } - - if (param.getMainJar() != null) { - args.add(param.getMainJar().getRes()); - } - - - // --files --conf --libjar ... 
- if (StringUtils.isNotEmpty(param.getOthers())) { - String others = param.getOthers(); - if(!others.contains("--queue")){ - if (StringUtils.isNotEmpty(param.getQueue())) { - args.add(Constants.SPARK_QUEUE); - args.add(param.getQueue()); - } - } - args.add(param.getOthers()); - }else if (StringUtils.isNotEmpty(param.getQueue())) { - args.add(Constants.SPARK_QUEUE); - args.add(param.getQueue()); - - } - - if (StringUtils.isNotEmpty(param.getMainArgs())) { - args.add(param.getMainArgs()); - } - - return args; - } - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/utils/LoggerUtils.java b/escheduler-server/src/main/java/cn/escheduler/server/utils/LoggerUtils.java deleted file mode 100644 index a4207d3793..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/utils/LoggerUtils.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.utils; - -import cn.escheduler.common.Constants; -import org.slf4j.Logger; - -import java.util.ArrayList; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * logger utils - */ -public class LoggerUtils { - - /** - * rules for extracting application ID - */ - private static final Pattern APPLICATION_REGEX = Pattern.compile(Constants.APPLICATION_REGEX); - - /** - * Task Logger's prefix - */ - public static final String TASK_LOGGER_INFO_PREFIX = "TASK"; - - public static final String TASK_LOGGER_THREAD_NAME = "TaskLogInfo"; - - /** - * build job id - * @param affix - * @param processDefId - * @param processInstId - * @param taskId - * @return - */ - public static String buildTaskId(String affix, - int processDefId, - int processInstId, - int taskId){ - // - [taskAppId=TASK_79_4084_15210] - return String.format(" - [taskAppId=%s-%s-%s-%s]",affix, - processDefId, - processInstId, - taskId); - } - - - /** - * processing log - * get yarn application id list - * @param log - * @param logger - * @return - */ - public static List getAppIds(String log, Logger logger) { - - List appIds = new ArrayList(); - - Matcher matcher = APPLICATION_REGEX.matcher(log); - - // analyse logs to get all submit yarn application id - while (matcher.find()) { - String appId = matcher.group(); - if(!appIds.contains(appId)){ - logger.info("find app id: {}", appId); - appIds.add(appId); - } - } - return appIds; - } -} \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/utils/ParamUtils.java b/escheduler-server/src/main/java/cn/escheduler/server/utils/ParamUtils.java deleted file mode 100644 index b16e9f3092..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/utils/ParamUtils.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.utils; - -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.common.utils.placeholder.BusinessTimeUtils; - -import java.util.Date; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - -/** - * param utils - */ -public class ParamUtils { - - /** - * parameter conversion - * - * @param globalParams - * @param localParams - * @return - */ - public static Map convert(Map globalParams, - Map globalParamsMap, - Map localParams, - CommandType commandType, - Date scheduleTime){ - if (globalParams == null - && localParams == null){ - return null; - } - // if it is a complement, - // you need to pass in the task instance id to locate the time - // of the process instance complement - Map timeParams = BusinessTimeUtils - .getBusinessTime(commandType, - scheduleTime); - - if (globalParamsMap != null){ - timeParams.putAll(globalParamsMap); - } - - if (globalParams != null && localParams != null){ - globalParams.putAll(localParams); - }else if (globalParams == null && localParams != null){ - globalParams = localParams; - } - Iterator> iter = globalParams.entrySet().iterator(); - while (iter.hasNext()){ - Map.Entry en = 
iter.next(); - Property property = en.getValue(); - - if (property.getValue() != null && property.getValue().length() > 0){ - if (property.getValue().startsWith("$")){ - /** - * local parameter refers to global parameter with the same name - * note: the global parameters of the process instance here are solidified parameters, - * and there are no variables in them. - */ - String val = property.getValue(); - val = ParameterUtils.convertParameterPlaceholders(val, timeParams); - property.setValue(val); - } - } - } - - return globalParams; - } - - /** - * format convert - * @param paramsMap - * @return - */ - public static Map convert(Map paramsMap){ - Map map = new HashMap<>(); - Iterator> iter = paramsMap.entrySet().iterator(); - while (iter.hasNext()){ - Map.Entry en = iter.next(); - map.put(en.getKey(),en.getValue().getValue()); - } - return map; - } -} \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/utils/ProcessUtils.java b/escheduler-server/src/main/java/cn/escheduler/server/utils/ProcessUtils.java deleted file mode 100644 index ff24616300..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/utils/ProcessUtils.java +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.utils; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.utils.CommonUtils; -import cn.escheduler.common.utils.OSUtils; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.server.rpc.LogClient; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - - -/** - * mainly used to get the start command line of a process - */ -public class ProcessUtils { - /** - * logger - */ - private final static Logger logger = LoggerFactory.getLogger(ProcessUtils.class); - - /** - * build command line characters - * @return - */ - public static String buildCommandStr(List commandList) throws IOException { - String cmdstr; - String[] cmd = commandList.toArray(new String[commandList.size()]); - SecurityManager security = System.getSecurityManager(); - boolean allowAmbiguousCommands = false; - if (security == null) { - allowAmbiguousCommands = true; - String value = System.getProperty("jdk.lang.Process.allowAmbiguousCommands"); - if (value != null) { - allowAmbiguousCommands = !"false".equalsIgnoreCase(value); - } - } - if (allowAmbiguousCommands) { - - String executablePath = new File(cmd[0]).getPath(); - - if (needsEscaping(VERIFICATION_LEGACY, executablePath)) { - executablePath = quoteString(executablePath); - } - - cmdstr = createCommandLine( - VERIFICATION_LEGACY, executablePath, cmd); - } else { - String executablePath; - try { - executablePath = getExecutablePath(cmd[0]); - } catch (IllegalArgumentException e) { - - StringBuilder join = new StringBuilder(); - for (String s : cmd) { - join.append(s).append(' '); 
- } - - cmd = getTokensFromCommand(join.toString()); - executablePath = getExecutablePath(cmd[0]); - - // Check new executable name once more - if (security != null) { - security.checkExec(executablePath); - } - } - - - cmdstr = createCommandLine( - - isShellFile(executablePath) ? VERIFICATION_CMD_BAT : VERIFICATION_WIN32, quoteString(executablePath), cmd); - } - return cmdstr; - } - - private static String getExecutablePath(String path) throws IOException { - boolean pathIsQuoted = isQuoted(true, path, "Executable name has embedded quote, split the arguments"); - - File fileToRun = new File(pathIsQuoted ? path.substring(1, path.length() - 1) : path); - return fileToRun.getPath(); - } - - private static boolean isShellFile(String executablePath) { - String upPath = executablePath.toUpperCase(); - return (upPath.endsWith(".CMD") || upPath.endsWith(".BAT")); - } - - private static String quoteString(String arg) { - StringBuilder argbuf = new StringBuilder(arg.length() + 2); - return argbuf.append('"').append(arg).append('"').toString(); - } - - - private static String[] getTokensFromCommand(String command) { - ArrayList matchList = new ArrayList<>(8); - Matcher regexMatcher = LazyPattern.PATTERN.matcher(command); - while (regexMatcher.find()) { - matchList.add(regexMatcher.group()); - } - return matchList.toArray(new String[matchList.size()]); - } - - private static class LazyPattern { - // Escape-support version: - // "(\")((?:\\\\\\1|.)+?)\\1|([^\\s\"]+)"; - private static final Pattern PATTERN = Pattern.compile("[^\\s\"]+|\"[^\"]*\""); - } - - private static final int VERIFICATION_CMD_BAT = 0; - - private static final int VERIFICATION_WIN32 = 1; - - private static final int VERIFICATION_LEGACY = 2; - - private static final char[][] ESCAPE_VERIFICATION = {{' ', '\t', '<', '>', '&', '|', '^'}, - - {' ', '\t', '<', '>'}, {' ', '\t'}}; - - private static Matcher matcher; - - private static String createCommandLine(int verificationType, final String executablePath, 
final String[] cmd) { - StringBuilder cmdbuf = new StringBuilder(80); - - cmdbuf.append(executablePath); - - for (int i = 1; i < cmd.length; ++i) { - cmdbuf.append(' '); - String s = cmd[i]; - if (needsEscaping(verificationType, s)) { - cmdbuf.append('"').append(s); - - if ((verificationType != VERIFICATION_CMD_BAT) && s.endsWith("\\")) { - cmdbuf.append('\\'); - } - cmdbuf.append('"'); - } else { - cmdbuf.append(s); - } - } - return cmdbuf.toString(); - } - - private static boolean isQuoted(boolean noQuotesInside, String arg, String errorMessage) { - int lastPos = arg.length() - 1; - if (lastPos >= 1 && arg.charAt(0) == '"' && arg.charAt(lastPos) == '"') { - // The argument has already been quoted. - if (noQuotesInside) { - if (arg.indexOf('"', 1) != lastPos) { - // There is ["] inside. - throw new IllegalArgumentException(errorMessage); - } - } - return true; - } - if (noQuotesInside) { - if (arg.indexOf('"') >= 0) { - // There is ["] inside. - throw new IllegalArgumentException(errorMessage); - } - } - return false; - } - - private static boolean needsEscaping(int verificationType, String arg) { - - boolean argIsQuoted = isQuoted((verificationType == VERIFICATION_CMD_BAT), arg, "Argument has embedded quote, use the explicit CMD.EXE call."); - - if (!argIsQuoted) { - char[] testEscape = ESCAPE_VERIFICATION[verificationType]; - for (int i = 0; i < testEscape.length; ++i) { - if (arg.indexOf(testEscape[i]) >= 0) { - return true; - } - } - } - return false; - } - - - /** - * kill yarn application - * @param appIds - * @param logger - * @param tenantCode - * @throws IOException - */ - public static void cancelApplication(List appIds, Logger logger, String tenantCode,String workDir) - throws IOException { - if (appIds.size() > 0) { - String appid = appIds.get(appIds.size() - 1); - String commandFile = String - .format("%s/%s.kill", workDir, appid); - String cmd = "yarn application -kill " + appid; - try { - StringBuilder sb = new StringBuilder(); - 
sb.append("#!/bin/sh\n"); - sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); - sb.append("cd $BASEDIR\n"); - if (CommonUtils.getSystemEnvPath() != null) { - sb.append("source " + CommonUtils.getSystemEnvPath() + "\n"); - } - sb.append("\n\n"); - sb.append(cmd); - - File f = new File(commandFile); - - if (!f.exists()) { - FileUtils.writeStringToFile(new File(commandFile), sb.toString(), Charset.forName("UTF-8")); - } - - String runCmd = "sh " + commandFile; - if (StringUtils.isNotEmpty(tenantCode)) { - runCmd = "sudo -u " + tenantCode + " " + runCmd; - } - - logger.info("kill cmd:{}", runCmd); - - Runtime.getRuntime().exec(runCmd); - } catch (Exception e) { - logger.error("kill application failed : " + e.getMessage(), e); - } - } - } - - /** - * kill tasks according to different task types - * @param taskInstance - */ - public static void kill(TaskInstance taskInstance) { - try { - int processId = taskInstance.getPid(); - if(processId == 0 ){ - logger.error("process kill failed, process id :{}, task id:{}", - processId, taskInstance.getId()); - return ; - } - - String cmd = String.format("sudo kill -9 %s", getPidsStr(processId)); - - logger.info("process id:{}, cmd:{}", processId, cmd); - - OSUtils.exeCmd(cmd); - - // find log and kill yarn job - killYarnJob(taskInstance); - - } catch (Exception e) { - logger.error("kill failed : " + e.getMessage(), e); - } - } - - /** - * get pids str - * @param processId - * @return - * @throws Exception - */ - private static String getPidsStr(int processId)throws Exception{ - StringBuilder sb = new StringBuilder(); - // pstree -p pid get sub pids - String pids = OSUtils.exeCmd("pstree -p " +processId+ ""); - Matcher mat = Pattern.compile("(\\d+)").matcher(pids); - while (mat.find()){ - sb.append(mat.group()+" "); - } - return sb.toString().trim(); - } - - /** - * find logs and kill yarn tasks - * @param taskInstance - */ - public static void killYarnJob(TaskInstance taskInstance) { - try { - 
Thread.sleep(Constants.SLEEP_TIME_MILLIS); - LogClient logClient = new LogClient(taskInstance.getHost(), Constants.RPC_PORT); - - String log = logClient.viewLog(taskInstance.getLogPath()); - if (StringUtils.isNotEmpty(log)) { - List appIds = LoggerUtils.getAppIds(log, logger); - String workerDir = taskInstance.getExecutePath(); - if (StringUtils.isEmpty(workerDir)) { - logger.error("task instance work dir is empty"); - throw new RuntimeException("task instance work dir is empty"); - } - if (appIds.size() > 0) { - cancelApplication(appIds, logger, taskInstance.getProcessInstance().getTenantCode(), taskInstance.getExecutePath()); - } - } - - } catch (Exception e) { - logger.error("kill yarn job failure",e); - } - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/utils/SparkArgsUtils.java b/escheduler-server/src/main/java/cn/escheduler/server/utils/SparkArgsUtils.java deleted file mode 100644 index b12c6ecef4..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/utils/SparkArgsUtils.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.utils; - - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ProgramType; -import cn.escheduler.common.task.spark.SparkParameters; -import org.apache.commons.lang.StringUtils; - -import java.util.ArrayList; -import java.util.List; - - -/** - * spark args utils - */ -public class SparkArgsUtils { - - /** - * build args - * @param param - * @return - */ - public static List buildArgs(SparkParameters param) { - List args = new ArrayList<>(); - String deployMode = "cluster"; - - args.add(Constants.MASTER); - if(StringUtils.isNotEmpty(param.getDeployMode())){ - deployMode = param.getDeployMode(); - - } - if(!"local".equals(deployMode)){ - args.add("yarn"); - args.add(Constants.DEPLOY_MODE); - } - - args.add(param.getDeployMode()); - - if(param.getProgramType() !=null ){ - if(param.getProgramType()!=ProgramType.PYTHON){ - if (StringUtils.isNotEmpty(param.getMainClass())) { - args.add(Constants.CLASS); - args.add(param.getMainClass()); - } - } - } - - - if (param.getDriverCores() != 0) { - args.add(Constants.DRIVER_CORES); - args.add(String.format("%d", param.getDriverCores())); - } - - if (StringUtils.isNotEmpty(param.getDriverMemory())) { - args.add(Constants.DRIVER_MEMORY); - args.add(param.getDriverMemory()); - } - - if (param.getNumExecutors() != 0) { - args.add(Constants.NUM_EXECUTORS); - args.add(String.format("%d", param.getNumExecutors())); - } - - if (param.getExecutorCores() != 0) { - args.add(Constants.EXECUTOR_CORES); - args.add(String.format("%d", param.getExecutorCores())); - } - - if (StringUtils.isNotEmpty(param.getExecutorMemory())) { - args.add(Constants.EXECUTOR_MEMORY); - args.add(param.getExecutorMemory()); - } - - // --files --conf --libjar ... 
- if (StringUtils.isNotEmpty(param.getOthers())) { - String others = param.getOthers(); - if(!others.contains("--queue")){ - if (StringUtils.isNotEmpty(param.getQueue())) { - args.add(Constants.SPARK_QUEUE); - args.add(param.getQueue()); - } - } - args.add(param.getOthers()); - }else if (StringUtils.isNotEmpty(param.getQueue())) { - args.add(Constants.SPARK_QUEUE); - args.add(param.getQueue()); - - } - - if (param.getMainJar() != null) { - args.add(param.getMainJar().getRes()); - } - - if (StringUtils.isNotEmpty(param.getMainArgs())) { - args.add(param.getMainArgs()); - } - - return args; - } - -} \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/utils/UDFUtils.java b/escheduler-server/src/main/java/cn/escheduler/server/utils/UDFUtils.java deleted file mode 100644 index 212ae0168d..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/utils/UDFUtils.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.utils; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.utils.HadoopUtils; -import cn.escheduler.dao.entity.UdfFunc; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; - -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import static cn.escheduler.common.utils.CollectionUtils.isNotEmpty; - -/** - * udf utils - */ -public class UDFUtils { - - /** - * create function format - */ - private static final String CREATE_FUNCTION_FORMAT = "create temporary function {0} as ''{1}''"; - - - /** - * create function list - */ - public static List createFuncs(List udfFuncs, String tenantCode,Logger logger){ - // get hive udf jar path - String hiveUdfJarPath = HadoopUtils.getHdfsUdfDir(tenantCode); - logger.info("hive udf jar path : {}" , hiveUdfJarPath); - - // is the root directory of udf defined - if (StringUtils.isEmpty(hiveUdfJarPath)) { - logger.error("not define hive udf jar path"); - throw new RuntimeException("hive udf jar base path not defined "); - } - Set resources = getFuncResouces(udfFuncs); - List funcList = new ArrayList<>(); - - // build jar sql - buildJarSql(funcList, resources, hiveUdfJarPath); - - // build temp function sql - buildTempFuncSql(funcList, udfFuncs); - - return funcList; - } - - /** - * build jar sql - */ - private static void buildJarSql(List sqls, Set resources, String uploadPath) { - String defaultFS = HadoopUtils.getInstance().getConfiguration().get(Constants.FS_DEFAULTFS); - if (!uploadPath.startsWith("hdfs:")) { - uploadPath = defaultFS + uploadPath; - } - - for (String resource : resources) { - sqls.add(String.format("add jar %s/%s", uploadPath, resource)); - } - } - - /** - * build temp function sql - */ - private static void buildTempFuncSql(List sqls, List udfFuncs) { - if (isNotEmpty(udfFuncs)) { - for (UdfFunc udfFunc : udfFuncs) { - sqls.add(MessageFormat - 
.format(CREATE_FUNCTION_FORMAT, udfFunc.getFuncName(), udfFunc.getClassName())); - } - } - } - - /** - * get the resource names of all functions - */ - private static Set getFuncResouces(List udfFuncs) { - Set resources = new HashSet<>(); - - for (UdfFunc udfFunc : udfFuncs) { - resources.add(udfFunc.getResourceName()); - } - - return resources; - } - - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/WorkerServer.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/WorkerServer.java deleted file mode 100644 index 13ee800312..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/WorkerServer.java +++ /dev/null @@ -1,335 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.enums.TaskType; -import cn.escheduler.common.queue.ITaskQueue; -import cn.escheduler.common.queue.TaskQueueFactory; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.common.thread.ThreadPoolExecutors; -import cn.escheduler.common.thread.ThreadUtils; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.OSUtils; -import cn.escheduler.dao.AlertDao; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.server.master.AbstractServer; -import cn.escheduler.server.utils.ProcessUtils; -import cn.escheduler.server.worker.runner.FetchTaskThread; -import cn.escheduler.server.zk.ZKWorkerClient; -import org.apache.commons.configuration.ConfigurationException; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.SpringApplication; -import org.springframework.boot.WebApplicationType; -import org.springframework.boot.builder.SpringApplicationBuilder; -import org.springframework.context.annotation.ComponentScan; - -import java.util.Set; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -/** - * worker server - */ -@ComponentScan("cn.escheduler") -public class WorkerServer extends AbstractServer { - - private static final Logger logger = LoggerFactory.getLogger(WorkerServer.class); - - - /** - * zk worker client - */ - private static ZKWorkerClient zkWorkerClient = null; - - /** - * process database access - */ - @Autowired - private ProcessDao processDao; - - /** - * alert database access - */ - @Autowired - private 
AlertDao alertDao; - - /** - * heartbeat thread pool - */ - private ScheduledExecutorService heartbeatWorerService; - - /** - * task queue impl - */ - protected ITaskQueue taskQueue; - - /** - * kill executor service - */ - private ExecutorService killExecutorService; - - /** - * fetch task executor service - */ - private ExecutorService fetchTaskExecutorService; - - public WorkerServer(){} - - public WorkerServer(ProcessDao processDao, AlertDao alertDao){ - try { - conf = new PropertiesConfiguration(Constants.WORKER_PROPERTIES_PATH); - }catch (ConfigurationException e){ - logger.error("load configuration failed",e); - System.exit(1); - } - - zkWorkerClient = ZKWorkerClient.getZKWorkerClient(); - - this.taskQueue = TaskQueueFactory.getTaskQueueInstance(); - - this.killExecutorService = ThreadUtils.newDaemonSingleThreadExecutor("Worker-Kill-Thread-Executor"); - - this.fetchTaskExecutorService = ThreadUtils.newDaemonSingleThreadExecutor("Worker-Fetch-Thread-Executor"); - } - - - /** - * master server startup - * - * master server not use web service - */ - public static void main(String[] args) { - - SpringApplication app = new SpringApplication(WorkerServer.class); - - app.run(args); - } - - - @Override - public void run(String... 
args) throws Exception { - // set the name of the current thread - Thread.currentThread().setName("Worker-Main-Thread"); - - WorkerServer workerServer = new WorkerServer(processDao,alertDao); - - workerServer.run(processDao,alertDao); - - logger.info("worker server started"); - - // blocking - workerServer.awaitTermination(); - } - - - public void run(ProcessDao processDao, AlertDao alertDao){ - - // heartbeat interval - heartBeatInterval = conf.getInt(Constants.WORKER_HEARTBEAT_INTERVAL, - Constants.defaultWorkerHeartbeatInterval); - - heartbeatWorerService = ThreadUtils.newDaemonThreadScheduledExecutor("Worker-Heartbeat-Thread-Executor", Constants.defaulWorkerHeartbeatThreadNum); - - // heartbeat thread implement - Runnable heartBeatThread = heartBeatThread(); - - zkWorkerClient.setStoppable(this); - - // regular heartbeat - // delay 5 seconds, send heartbeat every 30 seconds - heartbeatWorerService. - scheduleAtFixedRate(heartBeatThread, 5, heartBeatInterval, TimeUnit.SECONDS); - - // kill process thread implement - Runnable killProcessThread = getKillProcessThread(); - - // submit kill process thread - killExecutorService.execute(killProcessThread); - - /** - * register hooks, which are called before the process exits - */ - Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { - @Override - public void run() { - - logger.warn("worker server stopped"); - // worker server exit alert - if (zkWorkerClient.getActiveMasterNum() <= 1) { - for (int i = 0; i < Constants.ESCHEDULER_WARN_TIMES_FAILOVER;i++) { - alertDao.sendServerStopedAlert(1, OSUtils.getHost(), "Worker-Server"); - } - } - - } - })); - - // get worker number of concurrent tasks - int taskNum = conf.getInt(Constants.WORKER_FETCH_TASK_NUM,Constants.defaultWorkerFetchTaskNum); - - // new fetch task thread - FetchTaskThread fetchTaskThread = new FetchTaskThread(taskNum,zkWorkerClient, processDao,conf, taskQueue); - - // submit fetch task thread - fetchTaskExecutorService.execute(fetchTaskThread); 
- } - - @Override - public synchronized void stop(String cause) { - - try { - //execute only once - if(Stopper.isStoped()){ - return; - } - - logger.info("worker server is stopping ..., cause : {}", cause); - - // set stop signal is true - Stopper.stop(); - - try { - //thread sleep 3 seconds for thread quitely stop - Thread.sleep(3000L); - }catch (Exception e){ - logger.warn("thread sleep exception:" + e.getMessage(), e); - } - - try { - heartbeatWorerService.shutdownNow(); - }catch (Exception e){ - logger.warn("heartbeat service stopped exception"); - } - logger.info("heartbeat service stopped"); - - try { - ThreadPoolExecutors.getInstance().shutdown(); - }catch (Exception e){ - logger.warn("threadpool service stopped exception:{}",e.getMessage()); - } - - logger.info("threadpool service stopped"); - - try { - killExecutorService.shutdownNow(); - }catch (Exception e){ - logger.warn("worker kill executor service stopped exception:{}",e.getMessage()); - } - logger.info("worker kill executor service stopped"); - - try { - fetchTaskExecutorService.shutdownNow(); - }catch (Exception e){ - logger.warn("worker fetch task service stopped exception:{}",e.getMessage()); - } - logger.info("worker fetch task service stopped"); - - try{ - zkWorkerClient.close(); - }catch (Exception e){ - logger.warn("zookeeper service stopped exception:{}",e.getMessage()); - } - logger.info("zookeeper service stopped"); - - //notify - synchronized (lock) { - terminated = true; - lock.notifyAll(); - } - } catch (Exception e) { - logger.error("worker server stop exception : " + e.getMessage(), e); - System.exit(-1); - } - } - - - /** - * heartbeat thread implement - * @return - */ - private Runnable heartBeatThread(){ - Runnable heartBeatThread = new Runnable() { - @Override - public void run() { - // send heartbeat to zk - if (StringUtils.isEmpty(zkWorkerClient.getWorkerZNode())){ - logger.error("worker send heartbeat to zk failed"); - } - - 
zkWorkerClient.heartBeatForZk(zkWorkerClient.getWorkerZNode() , Constants.WORKER_PREFIX); - } - }; - return heartBeatThread; - } - - - /** - * kill process thread implement - * @return - */ - private Runnable getKillProcessThread(){ - Runnable killProcessThread = new Runnable() { - @Override - public void run() { - Set taskInfoSet = taskQueue.smembers(Constants.SCHEDULER_TASKS_KILL); - while (Stopper.isRunning()){ - try { - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - } catch (InterruptedException e) { - logger.error("interrupted exception",e); - } - // if set is null , return - if (CollectionUtils.isNotEmpty(taskInfoSet)){ - for (String taskInfo : taskInfoSet){ - // task info start with current host - if (taskInfo.startsWith(OSUtils.getHost())){ - String[] taskInfoArr = taskInfo.split("-"); - if (taskInfoArr.length != 2){ - continue; - }else { - int taskInstId=Integer.parseInt(taskInfoArr[1]); - TaskInstance taskInstance = processDao.getTaskInstanceRelationByTaskId(taskInstId); - - if(taskInstance.getTaskType().equals(TaskType.DEPENDENT.toString())){ - taskInstance.setState(ExecutionStatus.KILL); - processDao.saveTaskInstance(taskInstance); - }else{ - ProcessUtils.kill(taskInstance); - } - taskQueue.srem(Constants.SCHEDULER_TASKS_KILL,taskInfo); - } - } - } - } - - taskInfoSet = taskQueue.smembers(Constants.SCHEDULER_TASKS_KILL); - } - } - }; - return killProcessThread; - } - -} - diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/log/TaskLogDiscriminator.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/log/TaskLogDiscriminator.java deleted file mode 100644 index 91852725d3..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/log/TaskLogDiscriminator.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.worker.log; - -import ch.qos.logback.classic.spi.ILoggingEvent; -import ch.qos.logback.core.sift.AbstractDiscriminator; -import cn.escheduler.common.Constants; -import cn.escheduler.server.utils.LoggerUtils; - -public class TaskLogDiscriminator extends AbstractDiscriminator { - - private String key; - - private String logBase; - - /** - * logger name should be like: - * Task Logger name should be like: Task-{processDefinitionId}-{processInstanceId}-{taskInstanceId} - */ - @Override - public String getDiscriminatingValue(ILoggingEvent event) { - String loggerName = event.getLoggerName() - .split(Constants.EQUAL_SIGN)[1]; - String prefix = LoggerUtils.TASK_LOGGER_INFO_PREFIX + "-"; - if (loggerName.startsWith(prefix)) { - return loggerName.substring(prefix.length(), - loggerName.length() - 1).replace("-","/"); - } else { - return "unknown_task"; - } - } - - @Override - public void start() { - started = true; - } - - @Override - public String getKey() { - return key; - } - - public void setKey(String key) { - this.key = key; - } - - public String getLogBase() { - return logBase; - } - - public void setLogBase(String logBase) { - this.logBase = logBase; - } -} diff --git 
a/escheduler-server/src/main/java/cn/escheduler/server/worker/log/TaskLogFilter.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/log/TaskLogFilter.java deleted file mode 100644 index bb93f52cd5..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/log/TaskLogFilter.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.log; - -import ch.qos.logback.classic.spi.ILoggingEvent; -import ch.qos.logback.core.filter.Filter; -import ch.qos.logback.core.spi.FilterReply; -import cn.escheduler.server.utils.LoggerUtils; - -/** - * task log filter - */ -public class TaskLogFilter extends Filter { - - @Override - public FilterReply decide(ILoggingEvent event) { - if (event.getThreadName().startsWith(LoggerUtils.TASK_LOGGER_THREAD_NAME)) { - return FilterReply.ACCEPT; - } - return FilterReply.DENY; - } -} \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/log/WorkerLogFilter.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/log/WorkerLogFilter.java deleted file mode 100644 index ca8ea3fc64..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/log/WorkerLogFilter.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.log; - -import ch.qos.logback.classic.Level; -import ch.qos.logback.classic.spi.ILoggingEvent; -import ch.qos.logback.core.filter.Filter; -import ch.qos.logback.core.spi.FilterReply; - -/** - * worker log filter - */ -public class WorkerLogFilter extends Filter { - Level level; - - @Override - public FilterReply decide(ILoggingEvent event) { - if (event.getThreadName().startsWith("Worker-")){ - return FilterReply.ACCEPT; - } - return FilterReply.DENY; - } - public void setLevel(String level) { - this.level = Level.toLevel(level); - } -} \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/runner/FetchTaskThread.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/runner/FetchTaskThread.java deleted file mode 100644 index ca7501465d..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/runner/FetchTaskThread.java +++ /dev/null @@ -1,324 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.runner; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.queue.ITaskQueue; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.common.thread.ThreadUtils; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.FileUtils; -import cn.escheduler.common.utils.OSUtils; -import cn.escheduler.common.zk.AbstractZKClient; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.dao.entity.Tenant; -import cn.escheduler.dao.entity.WorkerGroup; -import cn.escheduler.server.zk.ZKWorkerClient; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang3.StringUtils; -import org.apache.curator.framework.recipes.locks.InterProcessMutex; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Arrays; -import java.util.Date; -import java.util.List; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ThreadPoolExecutor; - -/** - * fetch task thread - */ -public class FetchTaskThread implements Runnable{ - - private static final Logger logger = LoggerFactory.getLogger(FetchTaskThread.class); - /** - * set worker concurrent tasks - */ - private final int taskNum; - - /** - * zkWorkerClient - */ - private final ZKWorkerClient zkWorkerClient; - - /** - * task queue impl - */ - protected ITaskQueue taskQueue; - - /** - * process database access - */ - private final ProcessDao processDao; - - /** - * worker thread pool executor - */ - private final ExecutorService workerExecService; - - /** - * worker exec nums - */ - private int workerExecNums; - - /** - * conf - */ - private Configuration conf; - - /** - * task instance - */ - private TaskInstance taskInstance; - - /** - * task instance id - */ - Integer taskInstId; - - public FetchTaskThread(int taskNum, ZKWorkerClient zkWorkerClient, - ProcessDao processDao, Configuration conf, - ITaskQueue taskQueue){ - 
this.taskNum = taskNum; - this.zkWorkerClient = zkWorkerClient; - this.processDao = processDao; - this.workerExecNums = conf.getInt(Constants.WORKER_EXEC_THREADS, - Constants.defaultWorkerExecThreadNum); - // worker thread pool executor - this.workerExecService = ThreadUtils.newDaemonFixedThreadExecutor("Worker-Fetch-Task-Thread",workerExecNums); - this.conf = conf; - this.taskQueue = taskQueue; - } - - /** - * Check if the task runs on this worker - * @param taskInstance - * @param host - * @return - */ - private boolean checkWorkerGroup(TaskInstance taskInstance, String host){ - - int taskWorkerGroupId = processDao.getTaskWorkerGroupId(taskInstance); - - if(taskWorkerGroupId <= 0){ - return true; - } - WorkerGroup workerGroup = processDao.queryWorkerGroupById(taskWorkerGroupId); - if(workerGroup == null ){ - logger.info("task {} cannot find the worker group, use all worker instead.", taskInstance.getId()); - return true; - } - String ips = workerGroup.getIpList(); - if(StringUtils.isBlank(ips)){ - logger.error("task:{} worker group:{} parameters(ip_list) is null, this task would be running on all workers", - taskInstance.getId(), workerGroup.getId()); - } - String[] ipArray = ips.split(Constants.COMMA); - List ipList = Arrays.asList(ipArray); - return ipList.contains(host); - } - - - - - @Override - public void run() { - while (Stopper.isRunning()){ - InterProcessMutex mutex = null; - try { - ThreadPoolExecutor poolExecutor = (ThreadPoolExecutor) workerExecService; - //check memory and cpu usage and threads - boolean runCheckFlag = OSUtils.checkResource(this.conf, false) && checkThreadCount(poolExecutor); - - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - - if(!runCheckFlag) { - continue; - } - - //whether have tasks, if no tasks , no need lock //get all tasks - List tasksQueueList = taskQueue.getAllTasks(Constants.SCHEDULER_TASKS_QUEUE); - if (CollectionUtils.isEmpty(tasksQueueList)){ - continue; - } - // creating distributed locks, lock path 
/escheduler/lock/worker - mutex = zkWorkerClient.acquireZkLock(zkWorkerClient.getZkClient(), - zkWorkerClient.getWorkerLockPath()); - - - // task instance id str - List taskQueueStrArr = taskQueue.poll(Constants.SCHEDULER_TASKS_QUEUE, taskNum); - - for(String taskQueueStr : taskQueueStrArr){ - if (StringUtils.isEmpty(taskQueueStr)) { - continue; - } - - if (!checkThreadCount(poolExecutor)) { - break; - } - - // get task instance id - - taskInstId = getTaskInstanceId(taskQueueStr); - - // get task instance relation - taskInstance = processDao.getTaskInstanceRelationByTaskId(taskInstId); - - Tenant tenant = processDao.getTenantForProcess(taskInstance.getProcessInstance().getTenantId(), - taskInstance.getProcessDefine().getUserId()); - - // verify tenant is null - if (verifyTenantIsNull(taskQueueStr, tenant)) { - continue; - } - - // set queue for process instance, user-specified queue takes precedence over tenant queue - String userQueue = processDao.queryUserQueueByProcessInstanceId(taskInstance.getProcessInstanceId()); - taskInstance.getProcessInstance().setQueue(StringUtils.isEmpty(userQueue) ? 
tenant.getQueue() : userQueue); - - logger.info("worker fetch taskId : {} from queue ", taskInstId); - - // mainly to wait for the master insert task to succeed - waitForMasterEnterQueue(); - - // verify task instance is null - if (verifyTaskInstanceIsNull(taskQueueStr)) { - continue; - } - - if(!checkWorkerGroup(taskInstance, OSUtils.getHost())){ - continue; - } - - // local execute path - String execLocalPath = getExecLocalPath(); - - logger.info("task instance local execute path : {} ", execLocalPath); - - // init task - taskInstance.init(OSUtils.getHost(), - new Date(), - execLocalPath); - - // check and create Linux users - FileUtils.createWorkDirAndUserIfAbsent(execLocalPath, - tenant.getTenantCode(), logger); - - logger.info("task : {} ready to submit to task scheduler thread",taskInstId); - // submit task - workerExecService.submit(new TaskScheduleThread(taskInstance, processDao)); - - // remove node from zk - taskQueue.removeNode(Constants.SCHEDULER_TASKS_QUEUE, taskQueueStr); - } - - }catch (Exception e){ - logger.error("fetch task thread failure" ,e); - }finally { - AbstractZKClient.releaseMutex(mutex); - } - } - } - - /** - * verify task instance is null - * @param taskQueueStr - * @return - */ - private boolean verifyTaskInstanceIsNull(String taskQueueStr) { - if (taskInstance == null ) { - logger.error("task instance is null. 
task id : {} ", taskInstId); - taskQueue.removeNode(Constants.SCHEDULER_TASKS_QUEUE, taskQueueStr); - return true; - } - return false; - } - - /** - * verify tenant is null - * @param taskQueueStr - * @param tenant - * @return - */ - private boolean verifyTenantIsNull(String taskQueueStr, Tenant tenant) { - if(tenant == null){ - logger.error("tenant not exists,process define id : {},process instance id : {},task instance id : {}", - taskInstance.getProcessDefine().getId(), - taskInstance.getProcessInstance().getId(), - taskInstance.getId()); - taskQueue.removeNode(Constants.SCHEDULER_TASKS_QUEUE, taskQueueStr); - return true; - } - return false; - } - - /** - * get execute local path - * @return - */ - private String getExecLocalPath(){ - return FileUtils.getProcessExecDir(taskInstance.getProcessDefine().getProjectId(), - taskInstance.getProcessDefine().getId(), - taskInstance.getProcessInstance().getId(), - taskInstance.getId()); - } - - /** - * check - * @param poolExecutor - * @return - */ - private boolean checkThreadCount(ThreadPoolExecutor poolExecutor) { - int activeCount = poolExecutor.getActiveCount(); - if (activeCount >= workerExecNums) { - logger.info("thread insufficient , activeCount : {} , " + - "workerExecNums : {}, will sleep : {} millis for thread resource", - activeCount, - workerExecNums, - Constants.SLEEP_TIME_MILLIS); - return false; - } - return true; - } - - /** - * mainly to wait for the master insert task to succeed - * @throws Exception - */ - private void waitForMasterEnterQueue()throws Exception{ - int retryTimes = 30; - - while (taskInstance == null && retryTimes > 0) { - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - taskInstance = processDao.findTaskInstanceById(taskInstId); - retryTimes--; - } - } - - /** - * get task instance id - * - * @param taskQueueStr - * @return - */ - private int getTaskInstanceId(String taskQueueStr){ - return Integer.parseInt(taskQueueStr.split(Constants.UNDERLINE)[3]); - } -} \ No newline at end of file 
diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/runner/TaskScheduleThread.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/runner/TaskScheduleThread.java deleted file mode 100644 index fff74cd949..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/runner/TaskScheduleThread.java +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.runner; - - -import ch.qos.logback.classic.LoggerContext; -import ch.qos.logback.classic.sift.SiftingAppender; -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.enums.TaskType; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.TaskTimeoutParameter; -import cn.escheduler.common.utils.*; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.dao.entity.Tenant; -import cn.escheduler.server.utils.LoggerUtils; -import cn.escheduler.server.worker.log.TaskLogDiscriminator; -import cn.escheduler.server.worker.task.AbstractTask; -import cn.escheduler.server.worker.task.TaskManager; -import cn.escheduler.server.worker.task.TaskProps; -import com.alibaba.fastjson.JSONObject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.util.*; -import java.util.stream.Collectors; - - -/** - * task scheduler thread - */ -public class TaskScheduleThread implements Runnable { - - /** - * logger - */ - private final Logger logger = LoggerFactory.getLogger(TaskScheduleThread.class); - - /** - * task instance - */ - private TaskInstance taskInstance; - - /** - * process database access - */ - private final ProcessDao processDao; - - /** - * abstract task - */ - private AbstractTask task; - - public TaskScheduleThread(TaskInstance taskInstance, ProcessDao processDao){ - this.processDao = processDao; - this.taskInstance = taskInstance; - } - - @Override - public void run() { - - // update task state is running according to task type - updateTaskState(taskInstance.getTaskType()); - - try { - logger.info("script path : {}", 
taskInstance.getExecutePath()); - // task node - TaskNode taskNode = JSONObject.parseObject(taskInstance.getTaskJson(), TaskNode.class); - - // copy hdfs/minio file to local - copyHdfsToLocal(processDao, - taskInstance.getExecutePath(), - createProjectResFiles(taskNode), - logger); - - // get process instance according to tak instance - ProcessInstance processInstance = taskInstance.getProcessInstance(); - // get process define according to tak instance - ProcessDefinition processDefine = taskInstance.getProcessDefine(); - - // get tenant info - Tenant tenant = processDao.getTenantForProcess(processInstance.getTenantId(), - processDefine.getUserId()); - - if(tenant == null){ - logger.error("cannot find the tenant, process definition id:{}, user id:{}", - processDefine.getId(), - processDefine.getUserId()); - task.setExitStatusCode(Constants.EXIT_CODE_FAILURE); - }else{ - - // set task props - TaskProps taskProps = new TaskProps(taskNode.getParams(), - taskInstance.getExecutePath(), - processInstance.getScheduleTime(), - taskInstance.getName(), - taskInstance.getTaskType(), - taskInstance.getId(), - CommonUtils.getSystemEnvPath(), - tenant.getTenantCode(), - tenant.getQueue(), - taskInstance.getStartTime(), - getGlobalParamsMap(), - taskInstance.getDependency(), - processInstance.getCmdTypeIfComplement()); - // set task timeout - setTaskTimeout(taskProps, taskNode); - - taskProps.setTaskAppId(String.format("%s_%s_%s", - taskInstance.getProcessDefine().getId(), - taskInstance.getProcessInstance().getId(), - taskInstance.getId())); - - // custom logger - Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, - taskInstance.getProcessDefine().getId(), - taskInstance.getProcessInstance().getId(), - taskInstance.getId())); - - task = TaskManager.newTask(taskInstance.getTaskType(), - taskProps, - taskLogger); - - // task init - task.init(); - - // task handle - task.handle(); - - // task result process - task.after(); - } 
- }catch (Exception e){ - logger.error("task scheduler failure", e); - task.setExitStatusCode(Constants.EXIT_CODE_FAILURE); - kill(); - } - - logger.info("task instance id : {},task final status : {}", - taskInstance.getId(), - task.getExitStatus()); - // update task instance state - processDao.changeTaskState(task.getExitStatus(), - new Date(), - taskInstance.getId()); - } - - /** - * get global paras map - * @return - */ - private Map getGlobalParamsMap() { - Map globalParamsMap = new HashMap<>(16); - - // global params string - String globalParamsStr = taskInstance.getProcessInstance().getGlobalParams(); - - if (globalParamsStr != null) { - List globalParamsList = JSONObject.parseArray(globalParamsStr, Property.class); - globalParamsMap.putAll(globalParamsList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue))); - } - return globalParamsMap; - } - - /** - * update task state according to task type - * @param taskType - */ - private void updateTaskState(String taskType) { - // update task status is running - if(taskType.equals(TaskType.SQL.name()) || - taskType.equals(TaskType.PROCEDURE.name())){ - processDao.changeTaskState(ExecutionStatus.RUNNING_EXEUTION, - taskInstance.getStartTime(), - taskInstance.getHost(), - null, - getTaskLogPath(), - taskInstance.getId()); - }else{ - processDao.changeTaskState(ExecutionStatus.RUNNING_EXEUTION, - taskInstance.getStartTime(), - taskInstance.getHost(), - taskInstance.getExecutePath(), - getTaskLogPath(), - taskInstance.getId()); - } - } - - /** - * get task log path - * @return - */ - private String getTaskLogPath() { - String baseLog = ((TaskLogDiscriminator) ((SiftingAppender) ((LoggerContext) LoggerFactory.getILoggerFactory()) - .getLogger("ROOT") - .getAppender("TASKLOGFILE")) - .getDiscriminator()).getLogBase(); - if (baseLog.startsWith(Constants.SINGLE_SLASH)){ - return baseLog + Constants.SINGLE_SLASH + - taskInstance.getProcessDefinitionId() + Constants.SINGLE_SLASH + - 
taskInstance.getProcessInstanceId() + Constants.SINGLE_SLASH + - taskInstance.getId() + ".log"; - } - return System.getProperty("user.dir") + Constants.SINGLE_SLASH + - baseLog + Constants.SINGLE_SLASH + - taskInstance.getProcessDefinitionId() + Constants.SINGLE_SLASH + - taskInstance.getProcessInstanceId() + Constants.SINGLE_SLASH + - taskInstance.getId() + ".log"; - } - - /** - * set task timeout - * @param taskProps - * @param taskNode - */ - private void setTaskTimeout(TaskProps taskProps, TaskNode taskNode) { - // the default timeout is the maximum value of the integer - taskProps.setTaskTimeout(Integer.MAX_VALUE); - TaskTimeoutParameter taskTimeoutParameter = taskNode.getTaskTimeoutParameter(); - if (taskTimeoutParameter.getEnable()){ - // get timeout strategy - taskProps.setTaskTimeoutStrategy(taskTimeoutParameter.getStrategy()); - switch (taskTimeoutParameter.getStrategy()){ - case WARN: - break; - case FAILED: - if (Integer.MAX_VALUE > taskTimeoutParameter.getInterval() * 60) { - taskProps.setTaskTimeout(taskTimeoutParameter.getInterval() * 60); - } - break; - case WARNFAILED: - if (Integer.MAX_VALUE > taskTimeoutParameter.getInterval() * 60) { - taskProps.setTaskTimeout(taskTimeoutParameter.getInterval() * 60); - } - break; - default: - logger.error("not support task timeout strategy: {}", taskTimeoutParameter.getStrategy()); - throw new IllegalArgumentException("not support task timeout strategy"); - - } - } - } - - - - - /** - * kill task - */ - public void kill(){ - if (task != null){ - try { - task.cancelApplication(true); - }catch (Exception e){ - logger.error(e.getMessage(),e); - } - } - } - - - /** - * create project resource files - */ - private List createProjectResFiles(TaskNode taskNode) throws Exception{ - - Set projectFiles = new HashSet<>(); - AbstractParameters baseParam = TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams()); - - if (baseParam != null) { - List projectResourceFiles = baseParam.getResourceFilesList(); 
- if (projectResourceFiles != null) { - projectFiles.addAll(projectResourceFiles); - } - } - - return new ArrayList<>(projectFiles); - } - - /** - * copy hdfs file to local - * - * @param processDao - * @param execLocalPath - * @param projectRes - * @param logger - */ - private void copyHdfsToLocal(ProcessDao processDao, String execLocalPath, List projectRes, Logger logger) throws IOException { - for (String res : projectRes) { - File resFile = new File(execLocalPath, res); - if (!resFile.exists()) { - try { - // query the tenant code of the resource according to the name of the resource - String tentnCode = processDao.queryTenantCodeByResName(res); - String resHdfsPath = HadoopUtils.getHdfsFilename(tentnCode,res); - - logger.info("get resource file from hdfs :{}", resHdfsPath); - HadoopUtils.getInstance().copyHdfsToLocal(resHdfsPath, execLocalPath + File.separator + res, false, true); - }catch (Exception e){ - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage()); - } - } else { - logger.info("file : {} exists ", resFile.getName()); - } - } - } -} \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractCommandExecutor.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractCommandExecutor.java deleted file mode 100644 index a3349fd93d..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractCommandExecutor.java +++ /dev/null @@ -1,569 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.worker.task; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.thread.ThreadUtils; -import cn.escheduler.common.utils.HadoopUtils; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.server.utils.LoggerUtils; -import cn.escheduler.server.utils.ProcessUtils; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; - -import java.io.*; -import java.lang.reflect.Field; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * abstract command executor - */ -public abstract class AbstractCommandExecutor { - /** - * rules for extracting application ID - */ - protected static final Pattern APPLICATION_REGEX = Pattern.compile(Constants.APPLICATION_REGEX); - - /** - * process - */ - private Process process; - - /** - * log handler - */ - protected Consumer> logHandler; - - /** - * task dir - */ - protected final String taskDir; - - /** - * task appId - */ - protected final String taskAppId; - - /** - * task appId - */ - protected final int taskInstId; - - /** - * tenant code , execute task linux user - */ - protected final String tenantCode; - - /** - * env file - */ - protected final String 
envFile; - - /** - * start time - */ - protected final Date startTime; - - /** - * timeout - */ - protected int timeout; - - /** - * logger - */ - protected Logger logger; - - /** - * log list - */ - protected final List logBuffer; - - - public AbstractCommandExecutor(Consumer> logHandler, - String taskDir, String taskAppId,int taskInstId,String tenantCode, String envFile, - Date startTime, int timeout, Logger logger){ - this.logHandler = logHandler; - this.taskDir = taskDir; - this.taskAppId = taskAppId; - this.taskInstId = taskInstId; - this.tenantCode = tenantCode; - this.envFile = envFile; - this.startTime = startTime; - this.timeout = timeout; - this.logger = logger; - this.logBuffer = Collections.synchronizedList(new ArrayList<>()); - } - - /** - * task specific execution logic - * - * @param execCommand - * @param processDao - * @return - */ - public int run(String execCommand, ProcessDao processDao) { - int exitStatusCode; - - try { - if (StringUtils.isEmpty(execCommand)) { - exitStatusCode = 0; - return exitStatusCode; - } - - String commandFilePath = buildCommandFilePath(); - - // create command file if not exists - createCommandFileIfNotExists(execCommand, commandFilePath); - - //build process - buildProcess(commandFilePath); - - // parse process output - parseProcessOutput(process); - - // get process id - int pid = getProcessId(process); - - // task instance id - int taskInstId = Integer.parseInt(taskAppId.split("_")[2]); - - processDao.updatePidByTaskInstId(taskInstId, pid); - - logger.info("process start, process id is: {}", pid); - - // if timeout occurs, exit directly - long remainTime = getRemaintime(); - - // waiting for the run to finish - boolean status = process.waitFor(remainTime, TimeUnit.SECONDS); - - if (status) { - exitStatusCode = process.exitValue(); - logger.info("process has exited, work dir:{}, pid:{} ,exitStatusCode:{}", taskDir, pid,exitStatusCode); - //update process state to db - exitStatusCode = updateState(processDao, 
exitStatusCode, pid, taskInstId); - - } else { - TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); - if (taskInstance == null) { - logger.error("task instance id:{} not exist", taskInstId); - } else { - ProcessUtils.kill(taskInstance); - } - exitStatusCode = -1; - logger.warn("process timeout, work dir:{}, pid:{}", taskDir, pid); - } - - } catch (InterruptedException e) { - exitStatusCode = -1; - logger.error(String.format("interrupt exception: {}, task may be cancelled or killed",e.getMessage()), e); - throw new RuntimeException("interrupt exception. exitCode is : " + exitStatusCode); - } catch (Exception e) { - exitStatusCode = -1; - logger.error(e.getMessage(), e); - throw new RuntimeException("process error . exitCode is : " + exitStatusCode); - } - - return exitStatusCode; - } - - /** - * build process - * - * @param commandFile - * @throws IOException - */ - private void buildProcess(String commandFile) throws IOException { - //init process builder - ProcessBuilder processBuilder = new ProcessBuilder(); - // setting up a working directory - processBuilder.directory(new File(taskDir)); - // merge error information to standard output stream - processBuilder.redirectErrorStream(true); - // setting up user to run commands - processBuilder.command("sudo", "-u", tenantCode, commandType(), commandFile); - - process = processBuilder.start(); - - // print command - printCommand(processBuilder); - } - - /** - * update process state to db - * - * @param processDao - * @param exitStatusCode - * @param pid - * @param taskInstId - * @return - */ - private int updateState(ProcessDao processDao, int exitStatusCode, int pid, int taskInstId) { - //get yarn state by log - if (exitStatusCode != 0) { - TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); - logger.info("process id is {}", pid); - - List appIds = getAppLinks(taskInstance.getLogPath()); - if (appIds.size() > 0) { - String appUrl = String.join(Constants.COMMA, appIds); - 
logger.info("yarn log url:{}",appUrl); - processDao.updatePidByTaskInstId(taskInstId, pid, appUrl); - } - - // check if all operations are completed - if (!isSuccessOfYarnState(appIds)) { - exitStatusCode = -1; - } - } - return exitStatusCode; - } - - - /** - * cancel python task - */ - public void cancelApplication() throws Exception { - if (process == null) { - return; - } - - // clear log - clear(); - - int processId = getProcessId(process); - - logger.info("cancel process: {}", processId); - - // kill , waiting for completion - boolean killed = softKill(processId); - - if (!killed) { - // hard kill - hardKill(processId); - - // destory - process.destroy(); - - process = null; - } - } - - /** - * soft kill - * @param processId - * @return - * @throws InterruptedException - */ - private boolean softKill(int processId) { - - if (processId != 0 && process.isAlive()) { - try { - // sudo -u user command to run command - String cmd = String.format("sudo kill %d", processId); - - logger.info("soft kill task:{}, process id:{}, cmd:{}", taskAppId, processId, cmd); - - Runtime.getRuntime().exec(cmd); - } catch (IOException e) { - logger.info("kill attempt failed." + e.getMessage(), e); - } - } - - return process.isAlive(); - } - - /** - * hard kill - * @param processId - */ - private void hardKill(int processId) { - if (processId != 0 && process.isAlive()) { - try { - String cmd = String.format("sudo kill -9 %d", processId); - - logger.info("hard kill task:{}, process id:{}, cmd:{}", taskAppId, processId, cmd); - - Runtime.getRuntime().exec(cmd); - } catch (IOException e) { - logger.error("kill attempt failed." 
+ e.getMessage(), e); - } - } - } - - /** - * print command - * @param processBuilder - */ - private void printCommand(ProcessBuilder processBuilder) { - String cmdStr; - - try { - cmdStr = ProcessUtils.buildCommandStr(processBuilder.command()); - logger.info("task run command:\n{}", cmdStr); - } catch (IOException e) { - logger.error(e.getMessage(), e); - } - } - - /** - * clear - */ - private void clear() { - if (!logBuffer.isEmpty()) { - // log handle - logHandler.accept(logBuffer); - - logBuffer.clear(); - } - } - - /** - * get the standard output of the process - */ - private void parseProcessOutput(Process process) { - String threadLoggerInfoName = String.format(LoggerUtils.TASK_LOGGER_THREAD_NAME + "-%s", taskAppId); - ExecutorService parseProcessOutputExecutorService = ThreadUtils.newDaemonSingleThreadExecutor(threadLoggerInfoName); - parseProcessOutputExecutorService.submit(new Runnable(){ - @Override - public void run() { - BufferedReader inReader = null; - - try { - inReader = new BufferedReader(new InputStreamReader(process.getInputStream())); - String line; - - long lastFlushTime = System.currentTimeMillis(); - - while ((line = inReader.readLine()) != null) { - logBuffer.add(line); - lastFlushTime = flush(lastFlushTime); - } - } catch (Exception e) { - logger.error(e.getMessage(),e); - } finally { - clear(); - close(inReader); - } - } - }); - parseProcessOutputExecutorService.shutdown(); - } - - public int getPid() { - return getProcessId(process); - } - - /** - * check yarn state - * - * @param appIds - * @return - */ - public boolean isSuccessOfYarnState(List appIds) { - - boolean result = true; - try { - for (String appId : appIds) { - while(true){ - ExecutionStatus applicationStatus = HadoopUtils.getInstance().getApplicationStatus(appId); - logger.info("appId:{}, final state:{}",appId,applicationStatus.name()); - if (applicationStatus.equals(ExecutionStatus.FAILURE) || - applicationStatus.equals(ExecutionStatus.KILL)) { - return false; - } - - if 
(applicationStatus.equals(ExecutionStatus.SUCCESS)){ - break; - } - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - } - } - } catch (Exception e) { - logger.error(String.format("yarn applications: %s status failed : " + e.getMessage(), appIds.toString()),e); - result = false; - } - return result; - - } - - /** - * get app links - * @param fileName - * @return - */ - private List getAppLinks(String fileName) { - List logs = convertFile2List(fileName); - - List appIds = new ArrayList(); - /** - * analysis log,get submited yarn application id - */ - for (String log : logs) { - - String appId = findAppId(log); - if (StringUtils.isNotEmpty(appId) && !appIds.contains(appId)) { - logger.info("find app id: {}", appId); - appIds.add(appId); - } - } - return appIds; - } - - /** - * convert file to list - * @param filename - * @return - */ - private List convertFile2List(String filename) { - List lineList = new ArrayList(100); - File file=new File(filename); - - if (!file.exists()){ - return lineList; - } - - BufferedReader br = null; - try { - br = new BufferedReader(new InputStreamReader(new FileInputStream(filename), StandardCharsets.UTF_8)); - String line = null; - while ((line = br.readLine()) != null) { - lineList.add(line); - } - } catch (Exception e) { - logger.error(String.format("read file: %s failed : ",filename),e); - } finally { - if(br != null){ - try { - br.close(); - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - } - - } - return lineList; - } - - /** - * find app id - * - * @return appid - */ - private String findAppId(String line) { - Matcher matcher = APPLICATION_REGEX.matcher(line); - - if (matcher.find() && checkFindApp(line)) { - return matcher.group(); - } - - return null; - } - - - /** - * get remain time(s) - * - * @return - */ - private long getRemaintime() { - long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000; - long remainTime = timeout - usedTime; - - if (remainTime < 0) { - throw new RuntimeException("task 
execution time out"); - } - - return remainTime; - } - - /** - * get process id - * - * @param process - * @return - */ - private int getProcessId(Process process) { - int processId = 0; - - try { - Field f = process.getClass().getDeclaredField(Constants.PID); - f.setAccessible(true); - - processId = f.getInt(process); - } catch (Throwable e) { - logger.error(e.getMessage(), e); - } - - return processId; - } - - /** - * when log buffer siz or flush time reach condition , then flush - * - * @param lastFlushTime last flush time - * @return - */ - private long flush(long lastFlushTime) { - long now = System.currentTimeMillis(); - - /** - * when log buffer siz or flush time reach condition , then flush - */ - if (logBuffer.size() >= Constants.defaultLogRowsNum || now - lastFlushTime > Constants.defaultLogFlushInterval) { - lastFlushTime = now; - /** log handle */ - logHandler.accept(logBuffer); - - logBuffer.clear(); - } - return lastFlushTime; - } - - /** - * close buffer reader - * - * @param inReader - */ - private void close(BufferedReader inReader) { - if (inReader != null) { - try { - inReader.close(); - } catch (IOException e) { - logger.error(e.getMessage(), e); - } - } - } - - - protected abstract String buildCommandFilePath(); - protected abstract String commandType(); - protected abstract boolean checkFindApp(String line); - protected abstract void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException; -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractTask.java deleted file mode 100644 index 6472873d8b..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractTask.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.worker.task; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.enums.TaskRecordStatus; -import cn.escheduler.common.enums.TaskType; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.flink.FlinkParameters; -import cn.escheduler.common.task.mr.MapreduceParameters; -import cn.escheduler.common.task.procedure.ProcedureParameters; -import cn.escheduler.common.task.python.PythonParameters; -import cn.escheduler.common.task.shell.ShellParameters; -import cn.escheduler.common.task.spark.SparkParameters; -import cn.escheduler.common.task.sql.SqlParameters; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.dao.TaskRecordDao; -import cn.escheduler.server.utils.ParamUtils; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; - -import java.util.List; -import java.util.Map; - -/** - * executive task - */ -public abstract class AbstractTask { - - /** - * task props - **/ - protected TaskProps taskProps; - - /** - * log record - */ - protected Logger logger; - - - /** - * cancel - */ - protected volatile boolean cancel = false; - - /** - * exit code - */ - protected 
volatile int exitStatusCode = -1; - - /** - * @param taskProps - * @param logger - */ - protected AbstractTask(TaskProps taskProps, Logger logger) { - this.taskProps = taskProps; - this.logger = logger; - } - - /** - * init task - */ - public void init() throws Exception { - } - - /** - * task handle - */ - public abstract void handle() throws Exception; - - - - public void cancelApplication(boolean status) throws Exception { - this.cancel = status; - } - - /** - * log process - */ - public void logHandle(List logs) { - // note that the "new line" is added here to facilitate log parsing - logger.info(" -> {}", String.join("\n\t", logs)); - } - - - /** - * exit code - */ - public int getExitStatusCode() { - return exitStatusCode; - } - - public void setExitStatusCode(int exitStatusCode) { - this.exitStatusCode = exitStatusCode; - } - - /** - * get task parameters - */ - public abstract AbstractParameters getParameters(); - - - /** - * result processing - */ - public void after(){ - if (getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){ - // task recor flat : if true , start up qianfan - if (TaskRecordDao.getTaskRecordFlag() - && TaskType.typeIsNormalTask(taskProps.getTaskType())){ - AbstractParameters params = (AbstractParameters) JSONUtils.parseObject(taskProps.getTaskParams(), getCurTaskParamsClass()); - - // replace placeholder - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), - params.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); - if (paramsMap != null && !paramsMap.isEmpty() - && paramsMap.containsKey("v_proc_date")){ - String vProcDate = paramsMap.get("v_proc_date").getValue(); - if (!StringUtils.isEmpty(vProcDate)){ - TaskRecordStatus taskRecordState = TaskRecordDao.getTaskRecordState(taskProps.getNodeName(), vProcDate); - logger.info("task record status : {}",taskRecordState); - if (taskRecordState == TaskRecordStatus.FAILURE){ - 
setExitStatusCode(Constants.EXIT_CODE_FAILURE); - } - } - } - } - - }else if (getExitStatusCode() == Constants.EXIT_CODE_KILL){ - setExitStatusCode(Constants.EXIT_CODE_KILL); - }else { - setExitStatusCode(Constants.EXIT_CODE_FAILURE); - } - } - - - - - /** - * get current task parameter class - * @return - */ - private Class getCurTaskParamsClass(){ - Class paramsClass = null; - // get task type - TaskType taskType = TaskType.valueOf(taskProps.getTaskType()); - switch (taskType){ - case SHELL: - paramsClass = ShellParameters.class; - break; - case SQL: - paramsClass = SqlParameters.class; - break; - case PROCEDURE: - paramsClass = ProcedureParameters.class; - break; - case MR: - paramsClass = MapreduceParameters.class; - break; - case SPARK: - paramsClass = SparkParameters.class; - break; - case FLINK: - paramsClass = FlinkParameters.class; - case PYTHON: - paramsClass = PythonParameters.class; - break; - default: - logger.error("not support this task type: {}", taskType); - throw new IllegalArgumentException("not support this task type"); - } - return paramsClass; - } - - /** - * get exit status according to exitCode - * @return - */ - public ExecutionStatus getExitStatus(){ - ExecutionStatus status; - switch (getExitStatusCode()){ - case Constants.EXIT_CODE_SUCCESS: - status = ExecutionStatus.SUCCESS; - break; - case Constants.EXIT_CODE_KILL: - status = ExecutionStatus.KILL; - break; - default: - status = ExecutionStatus.FAILURE; - break; - } - return status; - } -} \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractYarnTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractYarnTask.java deleted file mode 100644 index 84d220a937..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/AbstractYarnTask.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.worker.task; - -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.server.utils.ProcessUtils; -import org.slf4j.Logger; - -import java.io.IOException; - -/** - * abstract yarn task - */ -public abstract class AbstractYarnTask extends AbstractTask { - - /** - * process instance - */ - - /** - * process task - */ - private ShellCommandExecutor shellCommandExecutor; - - /** - * process database access - */ - protected ProcessDao processDao; - - /** - * @param taskProps - * @param logger - * @throws IOException - */ - public AbstractYarnTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); - this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); - this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, - taskProps.getTaskDir(), - taskProps.getTaskAppId(), - taskProps.getTaskInstId(), - taskProps.getTenantCode(), - taskProps.getEnvFile(), - taskProps.getTaskStartTime(), - taskProps.getTaskTimeout(), - logger); - } - - @Override - public void handle() throws Exception { - try { - // construct process - exitStatusCode = shellCommandExecutor.run(buildCommand(), processDao); - } 
catch (Exception e) { - logger.error("yarn process failure", e); - exitStatusCode = -1; - } - } - - @Override - public void cancelApplication(boolean status) throws Exception { - cancel = true; - // cancel process - shellCommandExecutor.cancelApplication(); - TaskInstance taskInstance = processDao.findTaskInstanceById(taskProps.getTaskInstId()); - if (status && taskInstance != null){ - ProcessUtils.killYarnJob(taskInstance); - } - } - - /** - * create command - */ - protected abstract String buildCommand() throws Exception; -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/PythonCommandExecutor.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/PythonCommandExecutor.java deleted file mode 100644 index 6ea70f96f5..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/PythonCommandExecutor.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.task; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.utils.FileUtils; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.Date; -import java.util.List; -import java.util.function.Consumer; - -/** - * python command executor - */ -public class PythonCommandExecutor extends AbstractCommandExecutor { - - private static final Logger logger = LoggerFactory.getLogger(PythonCommandExecutor.class); - - public static final String PYTHON = "python"; - - - - public PythonCommandExecutor(Consumer> logHandler, - String taskDir, - String taskAppId, - int taskInstId, - String tenantCode, - String envFile, - Date startTime, - int timeout, - Logger logger) { - super(logHandler,taskDir,taskAppId,taskInstId,tenantCode, envFile, startTime, timeout, logger); - } - - - /** - * build command file path - * - * @return - */ - @Override - protected String buildCommandFilePath() { - return String.format("%s/py_%s.command", taskDir, taskAppId); - } - - /** - * create command file if not exists - * - * @param commandFile - * @throws IOException - */ - @Override - protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { - logger.info("tenantCode :{}, task dir:{}", tenantCode, taskDir); - - if (!Files.exists(Paths.get(commandFile))) { - logger.info("generate command file:{}", commandFile); - - StringBuilder sb = new StringBuilder(); - sb.append("#-*- encoding=utf8 -*-\n"); - - sb.append("\n\n"); - sb.append(execCommand); - logger.info(sb.toString()); - - // write data to file - FileUtils.writeStringToFile(new File(commandFile), - sb.toString(), - StandardCharsets.UTF_8); - } - } - - @Override - protected String commandType() { - String pythonHome = getPythonHome(envFile); - if 
(StringUtils.isEmpty(pythonHome)){ - return PYTHON; - } - return pythonHome; - } - - @Override - protected boolean checkFindApp(String line) { - return true; - } - - - /** - * get the absolute path of the Python command - * note : - * common.properties - * PYTHON_HOME configured under common.properties is Python absolute path, not PYTHON_HOME itself - * - * for example : - * your PYTHON_HOM is /opt/python3.7/ - * you must set PYTHON_HOME is /opt/python3.7/python under nder common.properties - * escheduler.env.path file. - * - * @param envPath - * @return - */ - private static String getPythonHome(String envPath){ - BufferedReader br = null; - StringBuilder sb = new StringBuilder(); - try { - br = new BufferedReader(new InputStreamReader(new FileInputStream(envPath))); - String line; - while ((line = br.readLine()) != null){ - if (line.contains(Constants.PYTHON_HOME)){ - sb.append(line); - break; - } - } - String result = sb.toString(); - if (org.apache.commons.lang.StringUtils.isEmpty(result)){ - return null; - } - String[] arrs = result.split(Constants.EQUAL_SIGN); - if (arrs.length == 2){ - return arrs[1]; - } - - }catch (IOException e){ - logger.error("read file failure",e); - }finally { - try { - if (br != null){ - br.close(); - } - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - } - return null; - } - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/ShellCommandExecutor.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/ShellCommandExecutor.java deleted file mode 100644 index d826fee9ca..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/ShellCommandExecutor.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.worker.task; - -import org.apache.commons.io.FileUtils; -import org.slf4j.Logger; - -import java.io.File; -import java.io.IOException; -import java.nio.charset.Charset; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.Date; -import java.util.List; -import java.util.function.Consumer; - -/** - * shell command executor - */ -public class ShellCommandExecutor extends AbstractCommandExecutor { - - public static final String SH = "sh"; - - - public ShellCommandExecutor(Consumer> logHandler, - String taskDir, - String taskAppId, - int taskInstId, - String tenantCode, - String envFile, - Date startTime, - int timeout, - Logger logger) { - super(logHandler,taskDir,taskAppId,taskInstId,tenantCode, envFile, startTime, timeout, logger); - } - - - @Override - protected String buildCommandFilePath() { - // command file - return String.format("%s/%s.command", taskDir, taskAppId); - } - - @Override - protected String commandType() { - return SH; - } - - @Override - protected boolean checkFindApp(String line) { - return line.contains(taskAppId); - } - - @Override - protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { - logger.info("tenantCode user:{}, task dir:{}", tenantCode, taskAppId); - - // create if non existence - if (!Files.exists(Paths.get(commandFile))) { - logger.info("create 
command file:{}", commandFile); - - StringBuilder sb = new StringBuilder(); - sb.append("#!/bin/sh\n"); - sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); - sb.append("cd $BASEDIR\n"); - - if (envFile != null) { - sb.append("source " + envFile + "\n"); - } - - sb.append("\n\n"); - sb.append(execCommand); - logger.info("command : {}",sb.toString()); - - // write data to file - FileUtils.writeStringToFile(new File(commandFile), sb.toString(), - Charset.forName("UTF-8")); - } - } - - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/TaskManager.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/TaskManager.java deleted file mode 100644 index de96ff25be..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/TaskManager.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.task; - - -import cn.escheduler.common.enums.TaskType; -import cn.escheduler.server.worker.task.dependent.DependentTask; -import cn.escheduler.server.worker.task.flink.FlinkTask; -import cn.escheduler.server.worker.task.http.HttpTask; -import cn.escheduler.server.worker.task.mr.MapReduceTask; -import cn.escheduler.server.worker.task.processdure.ProcedureTask; -import cn.escheduler.server.worker.task.python.PythonTask; -import cn.escheduler.server.worker.task.shell.ShellTask; -import cn.escheduler.server.worker.task.spark.SparkTask; -import cn.escheduler.server.worker.task.sql.SqlTask; -import org.apache.commons.lang3.EnumUtils; -import org.slf4j.Logger; - -/** - * task manaster - */ -public class TaskManager { - - - /** - * create new task - * @param taskType - * @param props - * @param logger - * @return - * @throws IllegalArgumentException - */ - public static AbstractTask newTask(String taskType, TaskProps props, Logger logger) - throws IllegalArgumentException { - switch (EnumUtils.getEnum(TaskType.class,taskType)) { - case SHELL: - return new ShellTask(props, logger); - case PROCEDURE: - return new ProcedureTask(props, logger); - case SQL: - return new SqlTask(props, logger); - case MR: - return new MapReduceTask(props, logger); - case SPARK: - return new SparkTask(props, logger); - case FLINK: - return new FlinkTask(props, logger); - case PYTHON: - return new PythonTask(props, logger); - case DEPENDENT: - return new DependentTask(props, logger); - case HTTP: - return new HttpTask(props, logger); - default: - logger.error("unsupport task type: {}", taskType); - throw new IllegalArgumentException("not support task type"); - } - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/TaskProps.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/TaskProps.java deleted file mode 100644 index 0db35e8d0a..0000000000 --- 
a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/TaskProps.java +++ /dev/null @@ -1,290 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.worker.task; - -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.common.enums.DataType; -import cn.escheduler.common.enums.Direct; -import cn.escheduler.common.enums.TaskTimeoutStrategy; -import cn.escheduler.common.process.Property; - -import java.util.Date; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - -/** - * task props - */ -public class TaskProps { - - /** - * task node name - **/ - private String nodeName; - - /** - * task instance id - **/ - private int taskInstId; - - /** - * tenant code , execute task linux user - **/ - private String tenantCode; - - private String taskType; - - /** - * task parameters - **/ - private String taskParams; - - /** - * task dir - **/ - private String taskDir; - - /** - * queue - **/ - private String queue; - - /** - * env file - **/ - private String envFile; - - /** - * defined params - **/ - private Map definedParams; - - /** - * task path - */ - private String taskAppId; - - /** - * task start time - */ - private Date taskStartTime; 
- - /** - * task timeout - */ - private int taskTimeout; - - /** - * task timeout strategy - */ - private TaskTimeoutStrategy taskTimeoutStrategy; - /** - * task dependence - */ - private String dependence; - - /** - * schedule time - * @return - */ - private Date scheduleTime; - - /** - * command type is complement - */ - private CommandType cmdTypeIfComplement; - - - public TaskProps(){} - public TaskProps(String taskParams, - String taskDir, - Date scheduleTime, - String nodeName, - String taskType, - int taskInstId, - String envFile, - String tenantCode, - String queue, - Date taskStartTime, - Map definedParams, - String dependence, - CommandType cmdTypeIfComplement){ - this.taskParams = taskParams; - this.taskDir = taskDir; - this.scheduleTime = scheduleTime; - this.nodeName = nodeName; - this.taskType = taskType; - this.taskInstId = taskInstId; - this.envFile = envFile; - this.tenantCode = tenantCode; - this.queue = queue; - this.taskStartTime = taskStartTime; - this.definedParams = definedParams; - this.dependence = dependence; - this.cmdTypeIfComplement = cmdTypeIfComplement; - - } - - public String getTenantCode() { - return tenantCode; - } - - public void setTenantCode(String tenantCode) { - this.tenantCode = tenantCode; - } - - public String getTaskParams() { - return taskParams; - } - - public void setTaskParams(String taskParams) { - this.taskParams = taskParams; - } - - public String getTaskDir() { - return taskDir; - } - - public void setTaskDir(String taskDir) { - this.taskDir = taskDir; - } - - public Map getDefinedParams() { - return definedParams; - } - - public void setDefinedParams(Map definedParams) { - this.definedParams = definedParams; - } - - public String getEnvFile() { - return envFile; - } - - public void setEnvFile(String envFile) { - this.envFile = envFile; - } - - - public String getNodeName() { - return nodeName; - } - - public void setNodeName(String nodeName) { - this.nodeName = nodeName; - } - - public int getTaskInstId() { - 
return taskInstId; - } - - public void setTaskInstId(int taskInstId) { - this.taskInstId = taskInstId; - } - - public String getQueue() { - return queue; - } - - public void setQueue(String queue) { - this.queue = queue; - } - - - public String getTaskAppId() { - return taskAppId; - } - - public void setTaskAppId(String taskAppId) { - this.taskAppId = taskAppId; - } - - public Date getTaskStartTime() { - return taskStartTime; - } - - public void setTaskStartTime(Date taskStartTime) { - this.taskStartTime = taskStartTime; - } - - public int getTaskTimeout() { - return taskTimeout; - } - - public void setTaskTimeout(int taskTimeout) { - this.taskTimeout = taskTimeout; - } - - public TaskTimeoutStrategy getTaskTimeoutStrategy() { - return taskTimeoutStrategy; - } - - public void setTaskTimeoutStrategy(TaskTimeoutStrategy taskTimeoutStrategy) { - this.taskTimeoutStrategy = taskTimeoutStrategy; - } - - public String getTaskType() { - return taskType; - } - - public void setTaskType(String taskType) { - this.taskType = taskType; - } - - public String getDependence() { - return dependence; - } - - public void setDependence(String dependence) { - this.dependence = dependence; - } - - public Date getScheduleTime() { - return scheduleTime; - } - - public void setScheduleTime(Date scheduleTime) { - this.scheduleTime = scheduleTime; - } - - public CommandType getCmdTypeIfComplement() { - return cmdTypeIfComplement; - } - - public void setCmdTypeIfComplement(CommandType cmdTypeIfComplement) { - this.cmdTypeIfComplement = cmdTypeIfComplement; - } - - /** - * get parameters map - * @return - */ - public Map getUserDefParamsMap() { - if (definedParams != null) { - Map userDefParamsMaps = new HashMap<>(); - Iterator> iter = definedParams.entrySet().iterator(); - while (iter.hasNext()){ - Map.Entry en = iter.next(); - Property property = new Property(en.getKey(), Direct.IN, DataType.VARCHAR , en.getValue()); - userDefParamsMaps.put(property.getProp(),property); - } - return 
userDefParamsMaps; - } - return null; - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/dependent/DependentExecute.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/dependent/DependentExecute.java deleted file mode 100644 index 698c8de62c..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/dependent/DependentExecute.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.task.dependent; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.DependResult; -import cn.escheduler.common.enums.DependentRelation; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.model.DateInterval; -import cn.escheduler.common.model.DependentItem; -import cn.escheduler.common.utils.DependentUtils; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.TaskInstance; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; - -/** - * dependent item execute - */ -public class DependentExecute { - /** - * process dao - */ - private static final ProcessDao processDao = DaoFactory.getDaoInstance(ProcessDao.class); - - private List dependItemList; - private DependentRelation relation; - - private DependResult modelDependResult = DependResult.WAITING; - private Map dependResultMap = new HashMap<>(); - - private Logger logger = LoggerFactory.getLogger(DependentExecute.class); - - public DependentExecute(List itemList, DependentRelation relation){ - this.dependItemList = itemList; - this.relation = relation; - } - - /** - * get dependent item for one dependent item - * @param dependentItem - * @return - */ - public DependResult getDependentResultForItem(DependentItem dependentItem, Date currentTime){ - List dateIntervals = DependentUtils.getDateIntervalList(currentTime, dependentItem.getDateValue()); - return calculateResultForTasks(dependentItem, dateIntervals ); - } - - /** - * calculate dependent result for one dependent item. 
- * @param dependentItem - * @param dateIntervals - * @return - */ - private DependResult calculateResultForTasks(DependentItem dependentItem, - List dateIntervals) { - DependResult result = DependResult.FAILED; - for(DateInterval dateInterval : dateIntervals){ - ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionId(), - dateInterval); - if(processInstance == null){ - logger.error("cannot find the right process instance: definition id:{}, start:{}, end:{}", - dependentItem.getDefinitionId(), dateInterval.getStartTime(), dateInterval.getEndTime() ); - return DependResult.FAILED; - } - if(dependentItem.getDepTasks().equals(Constants.DEPENDENT_ALL)){ - result = getDependResultByState(processInstance.getState()); - }else{ - TaskInstance taskInstance = null; - List taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId()); - - for(TaskInstance task : taskInstanceList){ - if(task.getName().equals(dependentItem.getDepTasks())){ - taskInstance = task; - break; - } - } - if(taskInstance == null){ - // cannot find task in the process instance - // maybe because process instance is running or failed. - result = getDependResultByState(processInstance.getState()); - }else{ - result = getDependResultByState(taskInstance.getState()); - } - } - if(result != DependResult.SUCCESS){ - break; - } - } - return result; - } - - /** - * find the last one process instance that : - * 1. manual run and finish between the interval - * 2. 
schedule run and schedule time between the interval - * @param definitionId - * @param dateInterval - * @return - */ - private ProcessInstance findLastProcessInterval(int definitionId, DateInterval dateInterval) { - - ProcessInstance runningProcess = processDao.findLastRunningProcess(definitionId, dateInterval); - if(runningProcess != null){ - return runningProcess; - } - - ProcessInstance lastSchedulerProcess = processDao.findLastSchedulerProcessInterval( - definitionId, dateInterval - ); - - ProcessInstance lastManualProcess = processDao.findLastManualProcessInterval( - definitionId, dateInterval - ); - - if(lastManualProcess ==null){ - return lastSchedulerProcess; - } - if(lastSchedulerProcess == null){ - return lastManualProcess; - } - - return (lastManualProcess.getEndTime().after(lastSchedulerProcess.getEndTime()))? - lastManualProcess : lastSchedulerProcess; - } - - /** - * get dependent result by task/process instance state - * @param state - * @return - */ - private DependResult getDependResultByState(ExecutionStatus state) { - - if(state.typeIsRunning() || state == ExecutionStatus.SUBMITTED_SUCCESS || state == ExecutionStatus.WAITTING_THREAD){ - return DependResult.WAITING; - }else if(state.typeIsSuccess()){ - return DependResult.SUCCESS; - }else{ - return DependResult.FAILED; - } - } - - /** - * judge depend item finished - * @return - */ - public boolean finish(Date currentTime){ - if(modelDependResult == DependResult.WAITING){ - modelDependResult = getModelDependResult(currentTime); - return false; - } - return true; - } - - /** - * get model depend result - * @return - */ - public DependResult getModelDependResult(Date currentTime){ - - List dependResultList = new ArrayList<>(); - - for(DependentItem dependentItem : dependItemList){ - DependResult dependResult = getDependResultForItem(dependentItem, currentTime); - if(dependResult != DependResult.WAITING){ - dependResultMap.put(dependentItem.getKey(), dependResult); - } - 
dependResultList.add(dependResult); - } - modelDependResult = DependentUtils.getDependResultForRelation( - this.relation, dependResultList - ); - return modelDependResult; - } - - /** - * get dependent item result - * @param item - * @return - */ - public DependResult getDependResultForItem(DependentItem item, Date currentTime){ - String key = item.getKey(); - if(dependResultMap.containsKey(key)){ - return dependResultMap.get(key); - } - return getDependentResultForItem(item, currentTime); - } - - public Map getDependResultMap(){ - return dependResultMap; - } - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/dependent/DependentTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/dependent/DependentTask.java deleted file mode 100644 index d799077f5c..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/dependent/DependentTask.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.task.dependent; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.DependResult; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.model.DependentTaskModel; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.dependent.DependentParameters; -import cn.escheduler.common.thread.Stopper; -import cn.escheduler.common.utils.DependentUtils; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.server.worker.task.AbstractTask; -import cn.escheduler.server.worker.task.TaskProps; -import org.slf4j.Logger; - -import java.util.*; - -import static cn.escheduler.common.Constants.DEPENDENT_SPLIT; - -public class DependentTask extends AbstractTask { - - private List dependentTaskList = new ArrayList<>(); - - /** - * depend item result map - * save the result to log file - */ - private Map dependResultMap = new HashMap<>(); - - private DependentParameters dependentParameters; - - private Date dependentDate; - - private ProcessDao processDao; - - public DependentTask(TaskProps props, Logger logger) { - super(props, logger); - } - - @Override - public void init(){ - logger.info("dependent task initialize"); - - this.dependentParameters = JSONUtils.parseObject(this.taskProps.getDependence(), - DependentParameters.class); - - for(DependentTaskModel taskModel : dependentParameters.getDependTaskList()){ - this.dependentTaskList.add(new DependentExecute( - taskModel.getDependItemList(), taskModel.getRelation())); - } - - this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); - - if(taskProps.getScheduleTime() != null){ - this.dependentDate = taskProps.getScheduleTime(); - }else{ - this.dependentDate = taskProps.getTaskStartTime(); - } - - } - - @Override - public void handle(){ - // set the name of the current 
thread - String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskProps.getTaskAppId()); - Thread.currentThread().setName(threadLoggerInfoName); - - try{ - TaskInstance taskInstance = null; - while(Stopper.isRunning()){ - taskInstance = processDao.findTaskInstanceById(this.taskProps.getTaskInstId()); - - if(taskInstance == null){ - exitStatusCode = -1; - break; - } - - if(taskInstance.getState() == ExecutionStatus.KILL){ - this.cancel = true; - } - - if(this.cancel || allDependentTaskFinish()){ - break; - } - - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - } - - if(cancel){ - exitStatusCode = Constants.EXIT_CODE_KILL; - }else{ - DependResult result = getTaskDependResult(); - exitStatusCode = (result == DependResult.SUCCESS) ? - Constants.EXIT_CODE_SUCCESS : Constants.EXIT_CODE_FAILURE; - } - }catch (Exception e){ - logger.error(e.getMessage(),e); - exitStatusCode = -1; - } - } - - /** - * get dependent result - * @return - */ - private DependResult getTaskDependResult(){ - List dependResultList = new ArrayList<>(); - for(DependentExecute dependentExecute : dependentTaskList){ - DependResult dependResult = dependentExecute.getModelDependResult(dependentDate); - dependResultList.add(dependResult); - } - DependResult result = DependentUtils.getDependResultForRelation( - this.dependentParameters.getRelation(), dependResultList - ); - return result; - } - - /** - * judge all dependent tasks finish - * @return - */ - private boolean allDependentTaskFinish(){ - boolean finish = true; - for(DependentExecute dependentExecute : dependentTaskList){ - Map resultMap = dependentExecute.getDependResultMap(); - Set keySet = resultMap.keySet(); - for(String key : keySet){ - if(!dependResultMap.containsKey(key)){ - dependResultMap.put(key, resultMap.get(key)); - //save depend result to log - logger.info("dependent item complete {} {},{}", - DEPENDENT_SPLIT, key, resultMap.get(key).toString()); - } - } - if(!dependentExecute.finish(dependentDate)){ - finish = false; - } - } - 
return finish; - } - - - @Override - public void cancelApplication(boolean cancelApplication) throws Exception { - // cancel process - this.cancel = true; - } - - @Override - public AbstractParameters getParameters() { - return null; - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/flink/FlinkTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/flink/FlinkTask.java deleted file mode 100644 index 2b24ef628d..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/flink/FlinkTask.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.task.flink; - -import cn.escheduler.common.process.Property; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.flink.FlinkParameters; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.server.utils.FlinkArgsUtils; -import cn.escheduler.server.utils.ParamUtils; -import cn.escheduler.server.worker.task.AbstractYarnTask; -import cn.escheduler.server.worker.task.TaskProps; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - * flink task - */ -public class FlinkTask extends AbstractYarnTask { - - /** - * flink command - */ - private static final String FLINK_COMMAND = "flink"; - private static final String FLINK_RUN = "run"; - - /** - * flink parameters - */ - private FlinkParameters flinkParameters; - - public FlinkTask(TaskProps props, Logger logger) { - super(props, logger); - } - - @Override - public void init() { - - logger.info("flink task params {}", taskProps.getTaskParams()); - - flinkParameters = JSONUtils.parseObject(taskProps.getTaskParams(), FlinkParameters.class); - - if (!flinkParameters.checkParameters()) { - throw new RuntimeException("flink task params is not valid"); - } - flinkParameters.setQueue(taskProps.getQueue()); - - if (StringUtils.isNotEmpty(flinkParameters.getMainArgs())) { - String args = flinkParameters.getMainArgs(); - // get process instance by task instance id - ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); - - /** - * combining local and global parameters - */ - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), - flinkParameters.getLocalParametersMap(), - processInstance.getCmdTypeIfComplement(), - processInstance.getScheduleTime()); 
- - logger.info("param Map : {}", paramsMap); - if (paramsMap != null ){ - - args = ParameterUtils.convertParameterPlaceholders(args, ParamUtils.convert(paramsMap)); - logger.info("param args : {}", args); - } - flinkParameters.setMainArgs(args); - } - } - - /** - * create command - * @return - */ - @Override - protected String buildCommand() { - List args = new ArrayList<>(); - - args.add(FLINK_COMMAND); - args.add(FLINK_RUN); - logger.info("flink task args : {}", args); - // other parameters - args.addAll(FlinkArgsUtils.buildArgs(flinkParameters)); - - String command = ParameterUtils - .convertParameterPlaceholders(String.join(" ", args), taskProps.getDefinedParams()); - - logger.info("flink task command : {}", command); - - return command; - } - - @Override - public AbstractParameters getParameters() { - return flinkParameters; - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/http/HttpTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/http/HttpTask.java deleted file mode 100644 index a19e1159a6..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/http/HttpTask.java +++ /dev/null @@ -1,270 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.worker.task.http; - - -import cn.escheduler.common.enums.HttpMethod; -import cn.escheduler.common.enums.HttpParametersType; -import cn.escheduler.common.process.HttpProperty; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.http.HttpParameters; -import cn.escheduler.common.utils.Bytes; -import cn.escheduler.common.utils.DateUtils; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.server.utils.ParamUtils; -import cn.escheduler.server.worker.task.AbstractTask; -import cn.escheduler.server.worker.task.TaskProps; -import com.alibaba.fastjson.JSONObject; -import org.apache.commons.io.Charsets; -import org.apache.commons.lang3.StringUtils; -import org.apache.http.HttpEntity; -import org.apache.http.ParseException; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpUriRequest; -import org.apache.http.client.methods.RequestBuilder; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.util.EntityUtils; -import org.slf4j.Logger; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - * http task - */ -public class HttpTask extends AbstractTask { - - private HttpParameters httpParameters; - - /** - * process database access - */ - private ProcessDao processDao; - - /** - * Convert mill seconds to second unit - */ - protected static final int MAX_CONNECTION_MILLISECONDS 
= 60000; - - protected static final String APPLICATION_JSON = "application/json"; - - protected String output; - - - public HttpTask(TaskProps props, Logger logger) { - super(props, logger); - this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); - } - - @Override - public void init() { - logger.info("http task params {}", taskProps.getTaskParams()); - this.httpParameters = JSONObject.parseObject(taskProps.getTaskParams(), HttpParameters.class); - - if (!httpParameters.checkParameters()) { - throw new RuntimeException("http task params is not valid"); - } - } - - @Override - public void handle() throws Exception { - String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskProps.getTaskAppId()); - Thread.currentThread().setName(threadLoggerInfoName); - - long startTime = System.currentTimeMillis(); - String statusCode = null; - String body = null; - try(CloseableHttpClient client = createHttpClient()) { - try(CloseableHttpResponse response = sendRequest(client)) { - statusCode = String.valueOf(getStatusCode(response)); - body = getResponseBody(response); - exitStatusCode = validResponse(body, statusCode); - long costTime = System.currentTimeMillis() - startTime; - logger.info("startTime: {}, httpUrl: {}, httpMethod: {}, costTime : {}Millisecond, statusCode : {}, body : {}, log : {}", - DateUtils.format2Readable(startTime), httpParameters.getUrl(),httpParameters.getHttpMethod(), costTime, statusCode, body, output); - }catch (Exception e) { - appendMessage(e.toString()); - exitStatusCode = -1; - logger.error("httpUrl[" + httpParameters.getUrl() + "] connection failed:"+output, e); - } - } catch (Exception e) { - appendMessage(e.toString()); - exitStatusCode = -1; - logger.error("httpUrl[" + httpParameters.getUrl() + "] connection failed:"+output, e); - } - } - - protected CloseableHttpResponse sendRequest(CloseableHttpClient client) throws IOException { - RequestBuilder builder = createRequestBuilder(); - ProcessInstance processInstance = 
processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); - - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), - httpParameters.getLocalParametersMap(), - processInstance.getCmdTypeIfComplement(), - processInstance.getScheduleTime()); - List httpPropertyList = new ArrayList<>(); - if(httpParameters.getHttpParams() != null && httpParameters.getHttpParams().size() > 0){ - for (HttpProperty httpProperty: httpParameters.getHttpParams()) { - String jsonObject = JSONObject.toJSONString(httpProperty); - String params = ParameterUtils.convertParameterPlaceholders(jsonObject,ParamUtils.convert(paramsMap)); - logger.info("http request params:{}",params); - httpPropertyList.add(JSONObject.parseObject(params,HttpProperty.class)); - } - } - addRequestParams(builder,httpPropertyList); - HttpUriRequest request = builder.setUri(httpParameters.getUrl()).build(); - setHeaders(request,httpPropertyList); - return client.execute(request); - } - - protected String getResponseBody(CloseableHttpResponse httpResponse) throws ParseException, IOException { - if (httpResponse == null) { - return null; - } - HttpEntity entity = httpResponse.getEntity(); - if (entity == null) { - return null; - } - String webPage = EntityUtils.toString(entity, Bytes.UTF8_ENCODING); - return webPage; - } - - protected int getStatusCode(CloseableHttpResponse httpResponse) { - int status = httpResponse.getStatusLine().getStatusCode(); - return status; - } - - protected int validResponse(String body, String statusCode){ - int exitStatusCode = 0; - switch (httpParameters.getHttpCheckCondition()) { - case BODY_CONTAINS: - if (StringUtils.isEmpty(body) || !body.contains(httpParameters.getCondition())) { - appendMessage(httpParameters.getUrl() + " doesn contain " - + httpParameters.getCondition()); - exitStatusCode = -1; - } - break; - case BODY_NOT_CONTAINS: - if (StringUtils.isEmpty(body) || body.contains(httpParameters.getCondition())) { - 
appendMessage(httpParameters.getUrl() + " contains " - + httpParameters.getCondition()); - exitStatusCode = -1; - } - break; - case STATUS_CODE_CUSTOM: - if (!statusCode.equals(httpParameters.getCondition())) { - appendMessage(httpParameters.getUrl() + " statuscode: " + statusCode + ", Must be: " + httpParameters.getCondition()); - exitStatusCode = -1; - } - break; - default: - if (!"200".equals(statusCode)) { - appendMessage(httpParameters.getUrl() + " statuscode: " + statusCode + ", Must be: 200"); - exitStatusCode = -1; - } - break; - } - return exitStatusCode; - } - - public String getOutput() { - return output; - } - - protected void appendMessage(String message) { - if (output == null) { - output = ""; - } - if (message != null && !message.trim().isEmpty()) { - output += message; - } - } - - protected void addRequestParams(RequestBuilder builder,List httpPropertyList) { - if(httpPropertyList != null && httpPropertyList.size() > 0){ - JSONObject jsonParam = new JSONObject(); - for (HttpProperty property: httpPropertyList){ - if(property.getHttpParametersType() != null){ - if (property.getHttpParametersType().equals(HttpParametersType.PARAMETER)){ - builder.addParameter(property.getProp(), property.getValue()); - }else if(property.getHttpParametersType().equals(HttpParametersType.BODY)){ - jsonParam.put(property.getProp(), property.getValue()); - } - } - } - StringEntity postingString = new StringEntity(jsonParam.toString(), Charsets.UTF_8); - postingString.setContentEncoding(Bytes.UTF8_ENCODING); - postingString.setContentType(APPLICATION_JSON); - builder.setEntity(postingString); - } - } - - protected void setHeaders(HttpUriRequest request,List httpPropertyList) { - if(httpPropertyList != null && httpPropertyList.size() > 0){ - for (HttpProperty property: httpPropertyList){ - if(property.getHttpParametersType() != null) { - if (property.getHttpParametersType().equals(HttpParametersType.HEADERS)) { - request.addHeader(property.getProp(), property.getValue()); 
- } - } - } - } - } - - protected CloseableHttpClient createHttpClient() { - final RequestConfig requestConfig = requestConfig(); - HttpClientBuilder httpClientBuilder; - httpClientBuilder = HttpClients.custom().setDefaultRequestConfig(requestConfig); - return httpClientBuilder.build(); - } - - private RequestConfig requestConfig() { - return RequestConfig.custom().setSocketTimeout(MAX_CONNECTION_MILLISECONDS).setConnectTimeout(MAX_CONNECTION_MILLISECONDS).build(); - } - - protected RequestBuilder createRequestBuilder() { - if (httpParameters.getHttpMethod().equals(HttpMethod.GET)) { - return RequestBuilder.get(); - } else if (httpParameters.getHttpMethod().equals(HttpMethod.POST)) { - return RequestBuilder.post(); - } else if (httpParameters.getHttpMethod().equals(HttpMethod.HEAD)) { - return RequestBuilder.head(); - } else if (httpParameters.getHttpMethod().equals(HttpMethod.PUT)) { - return RequestBuilder.put(); - } else if (httpParameters.getHttpMethod().equals(HttpMethod.DELETE)) { - return RequestBuilder.delete(); - } else { - return null; - } - } - - @Override - public AbstractParameters getParameters() { - return this.httpParameters; - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/mr/MapReduceTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/mr/MapReduceTask.java deleted file mode 100644 index 9650e55e26..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/mr/MapReduceTask.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.worker.task.mr; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ProgramType; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.mr.MapreduceParameters; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.server.utils.ParamUtils; -import cn.escheduler.server.worker.task.AbstractYarnTask; -import cn.escheduler.server.worker.task.TaskProps; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - * mapreduce task - */ -public class MapReduceTask extends AbstractYarnTask { - - - /** - * mapreduce parameters - */ - private MapreduceParameters mapreduceParameters; - - /** - * @param props - * @param logger - */ - public MapReduceTask(TaskProps props, Logger logger) { - super(props, logger); - } - - @Override - public void init() { - - logger.info("mapreduce task params {}", taskProps.getTaskParams()); - - this.mapreduceParameters = JSONUtils.parseObject(taskProps.getTaskParams(), MapreduceParameters.class); - - // check parameters - if (!mapreduceParameters.checkParameters()) { - throw new RuntimeException("mapreduce task params is not valid"); - } - - mapreduceParameters.setQueue(taskProps.getQueue()); - - // replace placeholder - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), - 
mapreduceParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); - if (paramsMap != null){ - String args = ParameterUtils.convertParameterPlaceholders(mapreduceParameters.getMainArgs(), ParamUtils.convert(paramsMap)); - mapreduceParameters.setMainArgs(args); - if(mapreduceParameters.getProgramType() != null && mapreduceParameters.getProgramType() == ProgramType.PYTHON){ - String others = ParameterUtils.convertParameterPlaceholders(mapreduceParameters.getOthers(), ParamUtils.convert(paramsMap)); - mapreduceParameters.setOthers(others); - } - } - } - - @Override - protected String buildCommand() throws Exception { - List parameterList = buildParameters(mapreduceParameters); - - String command = ParameterUtils.convertParameterPlaceholders(String.join(" ", parameterList), - taskProps.getDefinedParams()); - logger.info("mapreduce task command: {}", command); - - return command; - } - - @Override - public AbstractParameters getParameters() { - return mapreduceParameters; - } - - - private List buildParameters(MapreduceParameters mapreduceParameters){ - - List result = new ArrayList<>(); - - result.add(Constants.HADOOP); - - // main jar - if(mapreduceParameters.getMainJar()!= null){ - result.add(Constants.JAR); - result.add(mapreduceParameters.getMainJar().getRes()); - } - - // main class - if(mapreduceParameters.getProgramType() !=null ){ - if(mapreduceParameters.getProgramType()!= ProgramType.PYTHON){ - if(StringUtils.isNotEmpty(mapreduceParameters.getMainClass())){ - result.add(mapreduceParameters.getMainClass()); - } - } - } - - // others - if (StringUtils.isNotEmpty(mapreduceParameters.getOthers())) { - String others = mapreduceParameters.getOthers(); - if(!others.contains(Constants.MR_QUEUE)){ - if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) { - result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue())); - } - } - result.add(mapreduceParameters.getOthers()); - 
}else if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) { - result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue())); - - } - - // command args - if(StringUtils.isNotEmpty(mapreduceParameters.getMainArgs())){ - result.add(mapreduceParameters.getMainArgs()); - } - return result; - } -} - diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/processdure/ProcedureTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/processdure/ProcedureTask.java deleted file mode 100644 index 3355cbc23a..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/processdure/ProcedureTask.java +++ /dev/null @@ -1,346 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.task.processdure; - -import cn.escheduler.common.enums.DataType; -import cn.escheduler.common.enums.Direct; -import cn.escheduler.common.enums.TaskTimeoutStrategy; -import cn.escheduler.common.job.db.*; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.procedure.ProcedureParameters; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.DataSource; -import cn.escheduler.server.utils.ParamUtils; -import cn.escheduler.server.worker.task.AbstractTask; -import cn.escheduler.server.worker.task.TaskProps; -import com.alibaba.fastjson.JSONObject; -import com.cronutils.utils.StringUtils; -import org.slf4j.Logger; - -import java.sql.*; -import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - -import static cn.escheduler.common.enums.DataType.*; - -/** - * procedure task - */ -public class ProcedureTask extends AbstractTask { - - /** - * procedure parameters - */ - private ProcedureParameters procedureParameters; - - /** - * process database access - */ - private ProcessDao processDao; - - /** - * base datasource - */ - private BaseDataSource baseDataSource; - - public ProcedureTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); - - logger.info("procedure task params {}", taskProps.getTaskParams()); - - this.procedureParameters = JSONObject.parseObject(taskProps.getTaskParams(), ProcedureParameters.class); - - // check parameters - if (!procedureParameters.checkParameters()) { - throw new RuntimeException("procedure task params is not valid"); - } - - this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); - } - - @Override - public void handle() throws Exception { - // set the name of the current thread - String 
threadLoggerInfoName = String.format("TaskLogInfo-%s", taskProps.getTaskAppId()); - Thread.currentThread().setName(threadLoggerInfoName); - - logger.info("processdure type : {}, datasource : {}, method : {} , localParams : {}", - procedureParameters.getType(), - procedureParameters.getDatasource(), - procedureParameters.getMethod(), - procedureParameters.getLocalParams()); - - // determine whether there is a data source - if (procedureParameters.getDatasource() == 0){ - logger.error("datasource id not exists"); - exitStatusCode = -1; - return; - } - - DataSource dataSource = processDao.findDataSourceById(procedureParameters.getDatasource()); - logger.info("datasource name : {} , type : {} , desc : {} , user_id : {} , parameter : {}", - dataSource.getName(), - dataSource.getType(), - dataSource.getNote(), - dataSource.getUserId(), - dataSource.getConnectionParams()); - - if (dataSource == null){ - logger.error("datasource not exists"); - exitStatusCode = -1; - return; - } - Connection connection = null; - CallableStatement stmt = null; - try { - // load class - DataSourceFactory.loadClass(dataSource.getType()); - // get datasource - baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), - dataSource.getConnectionParams()); - - // get jdbc connection - connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), - baseDataSource.getUser(), - baseDataSource.getPassword()); - - - - // combining local and global parameters - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), - procedureParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); - - - Collection userDefParamsList = null; - - if (procedureParameters.getLocalParametersMap() != null){ - userDefParamsList = procedureParameters.getLocalParametersMap().values(); - } - - String method = ""; - // no parameters - if (CollectionUtils.isEmpty(userDefParamsList)){ - method = "{call " + 
procedureParameters.getMethod() + "}"; - }else { // exists parameters - int size = userDefParamsList.size(); - StringBuilder parameter = new StringBuilder(); - parameter.append("("); - for (int i = 0 ;i < size - 1; i++){ - parameter.append("?,"); - } - parameter.append("?)"); - method = "{call " + procedureParameters.getMethod() + parameter.toString()+ "}"; - } - - logger.info("call method : {}",method); - // call method - stmt = connection.prepareCall(method); - if(taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.FAILED || taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.WARNFAILED){ - stmt.setQueryTimeout(taskProps.getTaskTimeout()); - } - Map outParameterMap = new HashMap<>(); - if (userDefParamsList != null && userDefParamsList.size() > 0){ - int index = 1; - for (Property property : userDefParamsList){ - logger.info("localParams : prop : {} , dirct : {} , type : {} , value : {}" - ,property.getProp(), - property.getDirect(), - property.getType(), - property.getValue()); - // set parameters - if (property.getDirect().equals(Direct.IN)){ - ParameterUtils.setInParameter(index,stmt,property.getType(),paramsMap.get(property.getProp()).getValue()); - }else if (property.getDirect().equals(Direct.OUT)){ - setOutParameter(index,stmt,property.getType(),paramsMap.get(property.getProp()).getValue()); - property.setValue(paramsMap.get(property.getProp()).getValue()); - outParameterMap.put(index,property); - } - index++; - } - } - - stmt.executeUpdate(); - - /** - * print the output parameters to the log - */ - Iterator> iter = outParameterMap.entrySet().iterator(); - while (iter.hasNext()){ - Map.Entry en = iter.next(); - - int index = en.getKey(); - Property property = en.getValue(); - String prop = property.getProp(); - DataType dataType = property.getType(); - // get output parameter - getOutputParameter(stmt, index, prop, dataType); - } - - exitStatusCode = 0; - }catch (Exception e){ - logger.error(e.getMessage(),e); - exitStatusCode = -1; - throw new 
RuntimeException(String.format("process interrupted. exit status code is %d",exitStatusCode)); - } - finally { - if (stmt != null) { - try { - stmt.close(); - } catch (SQLException e) { - exitStatusCode = -1; - logger.error(e.getMessage(),e); - } - } - if (connection != null) { - try { - connection.close(); - } catch (SQLException e) { - exitStatusCode = -1; - logger.error(e.getMessage(), e); - } - } - } - } - - /** - * get output parameter - * @param stmt - * @param index - * @param prop - * @param dataType - * @throws SQLException - */ - private void getOutputParameter(CallableStatement stmt, int index, String prop, DataType dataType) throws SQLException { - switch (dataType){ - case VARCHAR: - logger.info("out prameter key : {} , value : {}",prop,stmt.getString(index)); - break; - case INTEGER: - logger.info("out prameter key : {} , value : {}", prop, stmt.getInt(index)); - break; - case LONG: - logger.info("out prameter key : {} , value : {}",prop,stmt.getLong(index)); - break; - case FLOAT: - logger.info("out prameter key : {} , value : {}",prop,stmt.getFloat(index)); - break; - case DOUBLE: - logger.info("out prameter key : {} , value : {}",prop,stmt.getDouble(index)); - break; - case DATE: - logger.info("out prameter key : {} , value : {}",prop,stmt.getDate(index)); - break; - case TIME: - logger.info("out prameter key : {} , value : {}",prop,stmt.getTime(index)); - break; - case TIMESTAMP: - logger.info("out prameter key : {} , value : {}",prop,stmt.getTimestamp(index)); - break; - case BOOLEAN: - logger.info("out prameter key : {} , value : {}",prop, stmt.getBoolean(index)); - break; - default: - break; - } - } - - @Override - public AbstractParameters getParameters() { - return procedureParameters; - } - - /** - * set out parameter - * @param index - * @param stmt - * @param dataType - * @param value - * @throws Exception - */ - private void setOutParameter(int index,CallableStatement stmt,DataType dataType,String value)throws Exception{ - if 
(dataType.equals(VARCHAR)){ - if (StringUtils.isEmpty(value)){ - stmt.registerOutParameter(index, Types.VARCHAR); - }else { - stmt.registerOutParameter(index, Types.VARCHAR, value); - } - - }else if (dataType.equals(INTEGER)){ - if (StringUtils.isEmpty(value)){ - stmt.registerOutParameter(index, Types.INTEGER); - }else { - stmt.registerOutParameter(index, Types.INTEGER, value); - } - - }else if (dataType.equals(LONG)){ - if (StringUtils.isEmpty(value)){ - stmt.registerOutParameter(index,Types.INTEGER); - }else { - stmt.registerOutParameter(index,Types.INTEGER ,value); - } - }else if (dataType.equals(FLOAT)){ - if (StringUtils.isEmpty(value)){ - stmt.registerOutParameter(index, Types.FLOAT); - }else { - stmt.registerOutParameter(index, Types.FLOAT,value); - } - }else if (dataType.equals(DOUBLE)){ - if (StringUtils.isEmpty(value)){ - stmt.registerOutParameter(index, Types.DOUBLE); - }else { - stmt.registerOutParameter(index, Types.DOUBLE , value); - } - - }else if (dataType.equals(DATE)){ - if (StringUtils.isEmpty(value)){ - stmt.registerOutParameter(index, Types.DATE); - }else { - stmt.registerOutParameter(index, Types.DATE , value); - } - - }else if (dataType.equals(TIME)){ - if (StringUtils.isEmpty(value)){ - stmt.registerOutParameter(index, Types.TIME); - }else { - stmt.registerOutParameter(index, Types.TIME , value); - } - - }else if (dataType.equals(TIMESTAMP)){ - if (StringUtils.isEmpty(value)){ - stmt.registerOutParameter(index, Types.TIMESTAMP); - }else { - stmt.registerOutParameter(index, Types.TIMESTAMP , value); - } - - }else if (dataType.equals(BOOLEAN)){ - if (StringUtils.isEmpty(value)){ - stmt.registerOutParameter(index, Types.BOOLEAN); - }else { - stmt.registerOutParameter(index, Types.BOOLEAN , value); - } - } - } -} \ No newline at end of file diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/python/PythonTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/python/PythonTask.java deleted file mode 
100644 index cb2bf7d27e..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/python/PythonTask.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.worker.task.python; - - -import cn.escheduler.common.process.Property; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.python.PythonParameters; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.server.utils.ParamUtils; -import cn.escheduler.server.worker.task.AbstractTask; -import cn.escheduler.server.worker.task.PythonCommandExecutor; -import cn.escheduler.server.worker.task.TaskProps; -import org.slf4j.Logger; - - -import java.util.Map; - -/** - * python task - */ -public class PythonTask extends AbstractTask { - - /** - * python parameters - */ - private PythonParameters pythonParameters; - - /** - * task dir - */ - private String taskDir; - - /** - * python command executor - */ - private PythonCommandExecutor pythonCommandExecutor; - - /** - * process database access - */ - private ProcessDao 
processDao; - - - public PythonTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); - - this.taskDir = taskProps.getTaskDir(); - - this.pythonCommandExecutor = new PythonCommandExecutor(this::logHandle, - taskProps.getTaskDir(), - taskProps.getTaskAppId(), - taskProps.getTaskInstId(), - taskProps.getTenantCode(), - taskProps.getEnvFile(), - taskProps.getTaskStartTime(), - taskProps.getTaskTimeout(), - logger); - this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); - } - - @Override - public void init() { - logger.info("python task params {}", taskProps.getTaskParams()); - - pythonParameters = JSONUtils.parseObject(taskProps.getTaskParams(), PythonParameters.class); - - if (!pythonParameters.checkParameters()) { - throw new RuntimeException("python task params is not valid"); - } - } - - @Override - public void handle() throws Exception { - try { - // construct process - exitStatusCode = pythonCommandExecutor.run(buildCommand(), processDao); - } catch (Exception e) { - logger.error("python task failure", e); - exitStatusCode = -1; - } - } - - @Override - public void cancelApplication(boolean cancelApplication) throws Exception { - // cancel process - pythonCommandExecutor.cancelApplication(); - } - - /** - * build command - * @return - * @throws Exception - */ - private String buildCommand() throws Exception { - String rawPythonScript = pythonParameters.getRawScript().replaceAll("\\r\\n", "\n"); - - /** - * combining local and global parameters - */ - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), - pythonParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); - if (paramsMap != null){ - rawPythonScript = ParameterUtils.convertParameterPlaceholders(rawPythonScript, ParamUtils.convert(paramsMap)); - } - - logger.info("raw python script : {}", pythonParameters.getRawScript()); - logger.info("task dir : {}", taskDir); - - return 
rawPythonScript; - } - - @Override - public AbstractParameters getParameters() { - return pythonParameters; - } - - - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/shell/ShellTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/shell/ShellTask.java deleted file mode 100644 index 41810fc176..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/shell/ShellTask.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.task.shell; - - -import cn.escheduler.common.Constants; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.shell.ShellParameters; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.server.utils.ParamUtils; -import cn.escheduler.server.worker.task.AbstractTask; -import cn.escheduler.server.worker.task.ShellCommandExecutor; -import cn.escheduler.server.worker.task.TaskProps; -import org.slf4j.Logger; - -import java.io.File; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardOpenOption; -import java.nio.file.attribute.FileAttribute; -import java.nio.file.attribute.PosixFilePermission; -import java.nio.file.attribute.PosixFilePermissions; -import java.util.Map; -import java.util.Set; - -/** - * shell task - */ -public class ShellTask extends AbstractTask { - - private ShellParameters shellParameters; - - /** - * task dir - */ - private String taskDir; - - private ShellCommandExecutor shellCommandExecutor; - - /** - * process database access - */ - private ProcessDao processDao; - - - public ShellTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); - - this.taskDir = taskProps.getTaskDir(); - - this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskProps.getTaskDir(), - taskProps.getTaskAppId(), - taskProps.getTaskInstId(), - taskProps.getTenantCode(), - taskProps.getEnvFile(), - taskProps.getTaskStartTime(), - taskProps.getTaskTimeout(), - logger); - this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); - } - - @Override - public void init() { - logger.info("shell task params {}", taskProps.getTaskParams()); - - shellParameters = JSONUtils.parseObject(taskProps.getTaskParams(), ShellParameters.class); - - if 
(!shellParameters.checkParameters()) { - throw new RuntimeException("shell task params is not valid"); - } - } - - @Override - public void handle() throws Exception { - try { - // construct process - exitStatusCode = shellCommandExecutor.run(buildCommand(), processDao); - } catch (Exception e) { - logger.error("shell task failure", e); - exitStatusCode = -1; - } - } - - @Override - public void cancelApplication(boolean cancelApplication) throws Exception { - // cancel process - shellCommandExecutor.cancelApplication(); - } - - /** - * create command - * @return - * @throws Exception - */ - private String buildCommand() throws Exception { - // generate scripts - String fileName = String.format("%s/%s_node.sh", taskDir, taskProps.getTaskAppId()); - Path path = new File(fileName).toPath(); - - if (Files.exists(path)) { - return fileName; - } - - String script = shellParameters.getRawScript().replaceAll("\\r\\n", "\n"); - - - /** - * combining local and global parameters - */ - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), - shellParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); - if (paramsMap != null){ - script = ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); - } - - - shellParameters.setRawScript(script); - - logger.info("raw script : {}", shellParameters.getRawScript()); - logger.info("task dir : {}", taskDir); - - Set perms = PosixFilePermissions.fromString(Constants.RWXR_XR_X); - FileAttribute> attr = PosixFilePermissions.asFileAttribute(perms); - - Files.createFile(path, attr); - - Files.write(path, shellParameters.getRawScript().getBytes(), StandardOpenOption.APPEND); - - return fileName; - } - - @Override - public AbstractParameters getParameters() { - return shellParameters; - } - - - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/spark/SparkTask.java 
b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/spark/SparkTask.java deleted file mode 100644 index d9c58e9f7d..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/spark/SparkTask.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.task.spark; - -import cn.escheduler.common.process.Property; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.spark.SparkParameters; -import cn.escheduler.common.utils.JSONUtils; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.server.utils.ParamUtils; -import cn.escheduler.server.utils.SparkArgsUtils; -import cn.escheduler.server.worker.task.AbstractYarnTask; -import cn.escheduler.server.worker.task.TaskProps; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - * spark task - */ -public class SparkTask extends AbstractYarnTask { - - /** - * spark command - */ - private static final String SPARK_COMMAND = "spark-submit"; - - /** - * spark parameters - */ - private SparkParameters sparkParameters; - - public SparkTask(TaskProps props, Logger logger) { - super(props, logger); - } - - @Override - public void init() { - - logger.info("spark task params {}", taskProps.getTaskParams()); - - sparkParameters = JSONUtils.parseObject(taskProps.getTaskParams(), SparkParameters.class); - - if (!sparkParameters.checkParameters()) { - throw new RuntimeException("spark task params is not valid"); - } - sparkParameters.setQueue(taskProps.getQueue()); - - if (StringUtils.isNotEmpty(sparkParameters.getMainArgs())) { - String args = sparkParameters.getMainArgs(); - - /** - * combining local and global parameters - */ - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), - sparkParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); - if (paramsMap != null ){ - args = ParameterUtils.convertParameterPlaceholders(args, ParamUtils.convert(paramsMap)); - } - sparkParameters.setMainArgs(args); - } - } - - /** - * create command - * @return - */ - @Override - protected String 
buildCommand() { - List args = new ArrayList<>(); - - args.add(SPARK_COMMAND); - - // other parameters - args.addAll(SparkArgsUtils.buildArgs(sparkParameters)); - - String command = ParameterUtils - .convertParameterPlaceholders(String.join(" ", args), taskProps.getDefinedParams()); - - logger.info("spark task command : {}", command); - - return command; - } - - @Override - public AbstractParameters getParameters() { - return sparkParameters; - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java deleted file mode 100644 index 8f8c94080a..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java +++ /dev/null @@ -1,447 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.task.sql; - -import cn.escheduler.alert.utils.MailUtils; -import cn.escheduler.common.enums.ShowType; -import cn.escheduler.common.enums.TaskTimeoutStrategy; -import cn.escheduler.common.enums.UdfType; -import cn.escheduler.common.job.db.*; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.task.AbstractParameters; -import cn.escheduler.common.task.sql.SqlBinds; -import cn.escheduler.common.task.sql.SqlParameters; -import cn.escheduler.common.task.sql.SqlType; -import cn.escheduler.common.utils.CollectionUtils; -import cn.escheduler.common.utils.CommonUtils; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.dao.AlertDao; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.DataSource; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.UdfFunc; -import cn.escheduler.dao.entity.User; -import cn.escheduler.server.utils.ParamUtils; -import cn.escheduler.server.utils.UDFUtils; -import cn.escheduler.server.worker.task.AbstractTask; -import cn.escheduler.server.worker.task.TaskProps; -import com.alibaba.fastjson.JSONArray; -import com.alibaba.fastjson.JSONObject; -import com.alibaba.fastjson.serializer.SerializerFeature; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang3.EnumUtils; -import org.slf4j.Logger; - -import java.sql.*; -import java.util.*; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -import static cn.escheduler.common.Constants.*; -import static cn.escheduler.common.enums.DbType.*; - -/** - * sql task - */ -public class SqlTask extends AbstractTask { - - /** - * sql parameters - */ - private SqlParameters sqlParameters; - - /** - * process database access - */ - private ProcessDao processDao; - - /** - * alert dao - */ - private AlertDao alertDao; - - /** - * datasource - */ - private DataSource 
dataSource; - - /** - * base datasource - */ - private BaseDataSource baseDataSource; - - - public SqlTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); - - logger.info("sql task params {}", taskProps.getTaskParams()); - this.sqlParameters = JSONObject.parseObject(taskProps.getTaskParams(), SqlParameters.class); - - if (!sqlParameters.checkParameters()) { - throw new RuntimeException("sql task params is not valid"); - } - this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); - this.alertDao = DaoFactory.getDaoInstance(AlertDao.class); - } - - @Override - public void handle() throws Exception { - // set the name of the current thread - String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskProps.getTaskAppId()); - Thread.currentThread().setName(threadLoggerInfoName); - logger.info(sqlParameters.toString()); - logger.info("sql type : {}, datasource : {}, sql : {} , localParams : {},udfs : {},showType : {},connParams : {}", - sqlParameters.getType(), - sqlParameters.getDatasource(), - sqlParameters.getSql(), - sqlParameters.getLocalParams(), - sqlParameters.getUdfs(), - sqlParameters.getShowType(), - sqlParameters.getConnParams()); - - // not set data source - if (sqlParameters.getDatasource() == 0){ - logger.error("datasource id not exists"); - exitStatusCode = -1; - return; - } - - dataSource= processDao.findDataSourceById(sqlParameters.getDatasource()); - logger.info("datasource name : {} , type : {} , desc : {} , user_id : {} , parameter : {}", - dataSource.getName(), - dataSource.getType(), - dataSource.getNote(), - dataSource.getUserId(), - dataSource.getConnectionParams()); - - if (dataSource == null){ - logger.error("datasource not exists"); - exitStatusCode = -1; - return; - } - - Connection con = null; - List createFuncs = null; - try { - // load class - DataSourceFactory.loadClass(dataSource.getType()); - // get datasource - baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), - 
dataSource.getConnectionParams()); - - // ready to execute SQL and parameter entity Map - SqlBinds mainSqlBinds = getSqlAndSqlParamsMap(sqlParameters.getSql()); - List preStatementSqlBinds = Optional.ofNullable(sqlParameters.getPreStatements()) - .orElse(new ArrayList<>()) - .stream() - .map(this::getSqlAndSqlParamsMap) - .collect(Collectors.toList()); - List postStatementSqlBinds = Optional.ofNullable(sqlParameters.getPostStatements()) - .orElse(new ArrayList<>()) - .stream() - .map(this::getSqlAndSqlParamsMap) - .collect(Collectors.toList()); - - // determine if it is UDF - boolean udfTypeFlag = EnumUtils.isValidEnum(UdfType.class, sqlParameters.getType()) - && StringUtils.isNotEmpty(sqlParameters.getUdfs()); - if(udfTypeFlag){ - List udfFuncList = processDao.queryUdfFunListByids(sqlParameters.getUdfs()); - createFuncs = UDFUtils.createFuncs(udfFuncList, taskProps.getTenantCode(), logger); - } - - // execute sql task - con = executeFuncAndSql(mainSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - throw e; - } - } - } - } - - /** - * ready to execute SQL and parameter entity Map - * @return - */ - private SqlBinds getSqlAndSqlParamsMap(String sql) { - Map sqlParamsMap = new HashMap<>(); - StringBuilder sqlBuilder = new StringBuilder(); - - // find process instance by task id - - - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), - sqlParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); - - // spell SQL according to the final user-defined variable - if(paramsMap == null){ - sqlBuilder.append(sql); - return new SqlBinds(sqlBuilder.toString(), sqlParamsMap); - } - - if (StringUtils.isNotEmpty(sqlParameters.getTitle())){ - String title = ParameterUtils.convertParameterPlaceholders(sqlParameters.getTitle(), - ParamUtils.convert(paramsMap)); - logger.info("SQL 
tile : {}",title); - sqlParameters.setTitle(title); - } - - // special characters need to be escaped, ${} needs to be escaped - String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*"; - setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap); - - // replace the ${} of the SQL statement with the Placeholder - String formatSql = sql.replaceAll(rgex,"?"); - sqlBuilder.append(formatSql); - - // print repalce sql - printReplacedSql(sql,formatSql,rgex,sqlParamsMap); - return new SqlBinds(sqlBuilder.toString(), sqlParamsMap); - } - - @Override - public AbstractParameters getParameters() { - return this.sqlParameters; - } - - /** - * execute sql - * @param mainSqlBinds - * @param preStatementsBinds - * @param postStatementsBinds - * @param createFuncs - * @return - */ - public Connection executeFuncAndSql(SqlBinds mainSqlBinds, - List preStatementsBinds, - List postStatementsBinds, - List createFuncs){ - Connection connection = null; - try { - // if upload resource is HDFS and kerberos startup - CommonUtils.loadKerberosConf(); - - // if hive , load connection params if exists - if (HIVE == dataSource.getType()) { - Properties paramProp = new Properties(); - paramProp.setProperty(USER, baseDataSource.getUser()); - paramProp.setProperty(PASSWORD, baseDataSource.getPassword()); - Map connParamMap = CollectionUtils.stringToMap(sqlParameters.getConnParams(), - SEMICOLON, - HIVE_CONF); - if(connParamMap != null){ - paramProp.putAll(connParamMap); - } - - connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), - paramProp); - }else{ - connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), - baseDataSource.getUser(), - baseDataSource.getPassword()); - } - - // create temp function - if (CollectionUtils.isNotEmpty(createFuncs)) { - try (Statement funcStmt = connection.createStatement()) { - for (String createFunc : createFuncs) { - logger.info("hive create function sql: {}", createFunc); - funcStmt.execute(createFunc); - } - } - } - - for (SqlBinds sqlBind: 
preStatementsBinds) { - try (PreparedStatement stmt = prepareStatementAndBind(connection, sqlBind)) { - int result = stmt.executeUpdate(); - logger.info("pre statement execute result: {}, for sql: {}",result,sqlBind.getSql()); - } - } - - try (PreparedStatement stmt = prepareStatementAndBind(connection, mainSqlBinds)) { - // decide whether to executeQuery or executeUpdate based on sqlType - if (sqlParameters.getSqlType() == SqlType.QUERY.ordinal()) { - // query statements need to be convert to JsonArray and inserted into Alert to send - JSONArray resultJSONArray = new JSONArray(); - ResultSet resultSet = stmt.executeQuery(); - ResultSetMetaData md = resultSet.getMetaData(); - int num = md.getColumnCount(); - - while (resultSet.next()) { - JSONObject mapOfColValues = new JSONObject(true); - for (int i = 1; i <= num; i++) { - mapOfColValues.put(md.getColumnName(i), resultSet.getObject(i)); - } - resultJSONArray.add(mapOfColValues); - } - - logger.debug("execute sql : {}", JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); - - // if there is a result set - if (resultJSONArray.size() > 0) { - if (StringUtils.isNotEmpty(sqlParameters.getTitle())) { - sendAttachment(sqlParameters.getTitle(), - JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); - }else{ - sendAttachment(taskProps.getNodeName() + " query resultsets ", - JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); - } - } - - exitStatusCode = 0; - - } else if (sqlParameters.getSqlType() == SqlType.NON_QUERY.ordinal()) { - // non query statement - stmt.executeUpdate(); - exitStatusCode = 0; - } - } - - for (SqlBinds sqlBind: postStatementsBinds) { - try (PreparedStatement stmt = prepareStatementAndBind(connection, sqlBind)) { - int result = stmt.executeUpdate(); - logger.info("post statement execute result: {},for sql: {}",result,sqlBind.getSql()); - } - } - } catch (Exception e) { - logger.error(e.getMessage(),e); - throw new 
RuntimeException(e.getMessage()); - } - return connection; - } - - /** - * preparedStatement bind - * @param connection - * @param sqlBinds - * @return - * @throws Exception - */ - private PreparedStatement prepareStatementAndBind(Connection connection, SqlBinds sqlBinds) throws Exception { - PreparedStatement stmt = connection.prepareStatement(sqlBinds.getSql()); - // is the timeout set - boolean timeoutFlag = taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.FAILED || - taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.WARNFAILED; - if(timeoutFlag){ - stmt.setQueryTimeout(taskProps.getTaskTimeout()); - } - Map params = sqlBinds.getParamsMap(); - if(params != null){ - for(Integer key : params.keySet()){ - Property prop = params.get(key); - ParameterUtils.setInParameter(key,stmt,prop.getType(),prop.getValue()); - } - } - logger.info("prepare statement replace sql : {} ",stmt.toString()); - return stmt; - } - - /** - * send mail as an attachment - * @param title - * @param content - */ - public void sendAttachment(String title,String content){ - - // process instance - ProcessInstance instance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); - - List users = alertDao.queryUserByAlertGroupId(instance.getWarningGroupId()); - - // receiving group list - List receviersList = new ArrayList(); - for(User user:users){ - receviersList.add(user.getEmail().trim()); - } - // custom receiver - String receivers = sqlParameters.getReceivers(); - if (StringUtils.isNotEmpty(receivers)){ - String[] splits = receivers.split(COMMA); - for (String receiver : splits){ - receviersList.add(receiver.trim()); - } - } - - // copy list - List receviersCcList = new ArrayList(); - // Custom Copier - String receiversCc = sqlParameters.getReceiversCc(); - if (StringUtils.isNotEmpty(receiversCc)){ - String[] splits = receiversCc.split(COMMA); - for (String receiverCc : splits){ - receviersCcList.add(receiverCc.trim()); - } - } - - String showTypeName = 
sqlParameters.getShowType().replace(COMMA,"").trim(); - if(EnumUtils.isValidEnum(ShowType.class,showTypeName)){ - Map mailResult = MailUtils.sendMails(receviersList, - receviersCcList, title, content, ShowType.valueOf(showTypeName)); - if(!(Boolean) mailResult.get(STATUS)){ - throw new RuntimeException("send mail failed!"); - } - }else{ - logger.error("showType: {} is not valid " ,showTypeName); - throw new RuntimeException(String.format("showType: %s is not valid ",showTypeName)); - } - } - - /** - * regular expressions match the contents between two specified strings - * @param content - * @return - */ - public void setSqlParamsMap(String content, String rgex, Map sqlParamsMap, Map paramsPropsMap){ - Pattern pattern = Pattern.compile(rgex); - Matcher m = pattern.matcher(content); - int index = 1; - while (m.find()) { - - String paramName = m.group(1); - Property prop = paramsPropsMap.get(paramName); - - sqlParamsMap.put(index,prop); - index ++; - } - } - - /** - * print replace sql - * @param content - * @param formatSql - * @param rgex - * @param sqlParamsMap - */ - public void printReplacedSql(String content, String formatSql,String rgex, Map sqlParamsMap){ - //parameter print style - logger.info("after replace sql , preparing : {}" , formatSql); - StringBuilder logPrint = new StringBuilder("replaced sql , parameters:"); - for(int i=1;i<=sqlParamsMap.size();i++){ - logPrint.append(sqlParamsMap.get(i).getValue()+"("+sqlParamsMap.get(i).getType()+")"); - } - logger.info(logPrint.toString()); - } -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/zk/ZKMasterClient.java b/escheduler-server/src/main/java/cn/escheduler/server/zk/ZKMasterClient.java deleted file mode 100644 index 2746b3172b..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/zk/ZKMasterClient.java +++ /dev/null @@ -1,411 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.zk; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.enums.ZKNodeType; -import cn.escheduler.common.model.MasterServer; -import cn.escheduler.common.zk.AbstractZKClient; -import cn.escheduler.dao.AlertDao; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.server.utils.ProcessUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.cache.PathChildrenCache; -import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; -import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; -import org.apache.curator.framework.recipes.locks.InterProcessMutex; -import org.apache.curator.utils.ThreadUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Date; -import java.util.List; -import java.util.concurrent.ThreadFactory; - - -/** - * zookeeper master client - * - * single instance - */ -public class ZKMasterClient extends AbstractZKClient { - - private static final Logger logger = 
LoggerFactory.getLogger(ZKMasterClient.class); - - private static final ThreadFactory defaultThreadFactory = ThreadUtils.newGenericThreadFactory("Master-Main-Thread"); - - /** - * master znode - */ - private String masterZNode = null; - - /** - * alert database access - */ - private AlertDao alertDao = null; - /** - * flow database access - */ - private ProcessDao processDao; - - /** - * zkMasterClient - */ - private static ZKMasterClient zkMasterClient = null; - - - private ZKMasterClient(ProcessDao processDao){ - this.processDao = processDao; - init(); - } - - private ZKMasterClient(){} - - /** - * get zkMasterClient - * @param processDao - * @return - */ - public static synchronized ZKMasterClient getZKMasterClient(ProcessDao processDao){ - if(zkMasterClient == null){ - zkMasterClient = new ZKMasterClient(processDao); - } - zkMasterClient.processDao = processDao; - - return zkMasterClient; - } - - /** - * init - */ - public void init(){ - // init dao - this.initDao(); - - InterProcessMutex mutex = null; - try { - // create distributed lock with the root node path of the lock space as /escheduler/lock/failover/master - String znodeLock = getMasterStartUpLockPath(); - mutex = new InterProcessMutex(zkClient, znodeLock); - mutex.acquire(); - - // init system znode - this.initSystemZNode(); - - // monitor master - this.listenerMaster(); - - // monitor worker - this.listenerWorker(); - - // register master - this.registerMaster(); - - // check if fault tolerance is required,failure and tolerance - if (getActiveMasterNum() == 1) { - failoverWorker(null, true); - failoverMaster(null); - } - - }catch (Exception e){ - logger.error("master start up exception : " + e.getMessage(),e); - }finally { - releaseMutex(mutex); - } - } - - - - - /** - * init dao - */ - public void initDao(){ - this.alertDao = DaoFactory.getDaoInstance(AlertDao.class); - this.processDao = DaoFactory.getDaoInstance(ProcessDao.class); - } - /** - * get alert dao - * @return - */ - public AlertDao 
getAlertDao() { - return alertDao; - } - - - - - /** - * register master znode - */ - public void registerMaster(){ - try { - String serverPath = registerServer(ZKNodeType.MASTER); - if(StringUtils.isEmpty(serverPath)){ - System.exit(-1); - } - masterZNode = serverPath; - } catch (Exception e) { - logger.error("register master failure : " + e.getMessage(),e); - System.exit(-1); - } - } - - - - /** - * monitor master - */ - public void listenerMaster(){ - PathChildrenCache masterPc = new PathChildrenCache(zkClient, - getZNodeParentPath(ZKNodeType.MASTER), true ,defaultThreadFactory); - - try { - masterPc.start(); - masterPc.getListenable().addListener(new PathChildrenCacheListener() { - @Override - public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception { - switch (event.getType()) { - case CHILD_ADDED: - logger.info("master node added : {}",event.getData().getPath()); - break; - case CHILD_REMOVED: - String path = event.getData().getPath(); - String serverHost = getHostByEventDataPath(path); - if(checkServerSelfDead(serverHost, ZKNodeType.MASTER)){ - return; - } - removeZKNodePath(path, ZKNodeType.MASTER, true); - break; - case CHILD_UPDATED: - break; - default: - break; - } - } - }); - }catch (Exception e){ - logger.error("monitor master failed : " + e.getMessage(),e); - } -} - - private void removeZKNodePath(String path, ZKNodeType zkNodeType, boolean failover) { - logger.info("{} node deleted : {}", zkNodeType.toString(), path); - InterProcessMutex mutex = null; - try { - String failoverPath = getFailoverLockPath(zkNodeType); - // create a distributed lock - mutex = new InterProcessMutex(getZkClient(), failoverPath); - mutex.acquire(); - - String serverHost = getHostByEventDataPath(path); - // handle dead server - handleDeadServer(path, zkNodeType, Constants.ADD_ZK_OP); - //alert server down. 
- alertServerDown(serverHost, zkNodeType); - //failover server - if(failover){ - failoverServerWhenDown(serverHost, zkNodeType); - } - }catch (Exception e){ - logger.error("{} server failover failed.", zkNodeType.toString()); - logger.error("failover exception : " + e.getMessage(),e); - } - finally { - releaseMutex(mutex); - } - } - - private void failoverServerWhenDown(String serverHost, ZKNodeType zkNodeType) throws Exception { - if(StringUtils.isEmpty(serverHost)){ - return ; - } - switch (zkNodeType){ - case MASTER: - failoverMaster(serverHost); - break; - case WORKER: - failoverWorker(serverHost, true); - default: - break; - } - } - - private String getFailoverLockPath(ZKNodeType zkNodeType){ - - switch (zkNodeType){ - case MASTER: - return getMasterFailoverLockPath(); - case WORKER: - return getWorkerFailoverLockPath(); - default: - return ""; - } - } - - private void alertServerDown(String serverHost, ZKNodeType zkNodeType) { - - String serverType = zkNodeType.toString(); - for (int i = 0; i < Constants.ESCHEDULER_WARN_TIMES_FAILOVER; i++) { - alertDao.sendServerStopedAlert(1, serverHost, serverType); - } - } - - /** - * monitor worker - */ - public void listenerWorker(){ - - PathChildrenCache workerPc = new PathChildrenCache(zkClient, - getZNodeParentPath(ZKNodeType.WORKER),true ,defaultThreadFactory); - try { - workerPc.start(); - workerPc.getListenable().addListener(new PathChildrenCacheListener() { - @Override - public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) { - switch (event.getType()) { - case CHILD_ADDED: - logger.info("node added : {}" ,event.getData().getPath()); - break; - case CHILD_REMOVED: - String path = event.getData().getPath(); - logger.info("node deleted : {}",event.getData().getPath()); - removeZKNodePath(path, ZKNodeType.WORKER, true); - break; - default: - break; - } - } - }); - }catch (Exception e){ - logger.error("listener worker failed : " + e.getMessage(),e); - } - } - - - /** - * get master znode - * 
@return - */ - public String getMasterZNode() { - return masterZNode; - } - - /** - * task needs failover if task start before worker starts - * - * @param taskInstance - * @return - */ - private boolean checkTaskInstanceNeedFailover(TaskInstance taskInstance) throws Exception { - - boolean taskNeedFailover = true; - - //now no host will execute this task instance,so no need to failover the task - if(taskInstance.getHost() == null){ - return false; - } - - // if the worker node exists in zookeeper, we must check the task starts after the worker - if(checkZKNodeExists(taskInstance.getHost(), ZKNodeType.WORKER)){ - //if task start after worker starts, there is no need to failover the task. - if(checkTaskAfterWorkerStart(taskInstance)){ - taskNeedFailover = false; - } - } - return taskNeedFailover; - } - - /** - * check task start after the worker server starts. - * @param taskInstance - * @return - */ - private boolean checkTaskAfterWorkerStart(TaskInstance taskInstance) { - if(StringUtils.isEmpty(taskInstance.getHost())){ - return false; - } - Date workerServerStartDate = null; - List workerServers= getServersList(ZKNodeType.WORKER); - for(MasterServer server : workerServers){ - if(server.getHost().equals(taskInstance.getHost())){ - workerServerStartDate = server.getCreateTime(); - break; - } - } - - if(workerServerStartDate != null){ - return taskInstance.getStartTime().after(workerServerStartDate); - }else{ - return false; - } - } - - /** - * failover worker tasks - * 1. kill yarn job if there are yarn jobs in tasks. - * 2. change task state from running to need failover. - * 3. 
failover all tasks when workerHost is null - * @param workerHost - */ - private void failoverWorker(String workerHost, boolean needCheckWorkerAlive) throws Exception { - logger.info("start worker[{}] failover ...", workerHost); - - List needFailoverTaskInstanceList = processDao.queryNeedFailoverTaskInstances(workerHost); - for(TaskInstance taskInstance : needFailoverTaskInstanceList){ - if(needCheckWorkerAlive){ - if(!checkTaskInstanceNeedFailover(taskInstance)){ - continue; - } - } - - ProcessInstance instance = processDao.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); - if(instance!=null){ - taskInstance.setProcessInstance(instance); - } - // only kill yarn job if exists , the local thread has exited - ProcessUtils.killYarnJob(taskInstance); - - taskInstance.setState(ExecutionStatus.NEED_FAULT_TOLERANCE); - processDao.saveTaskInstance(taskInstance); - } - logger.info("end worker[{}] failover ...", workerHost); - } - - /** - * failover master tasks - * @param masterHost - */ - private void failoverMaster(String masterHost) { - logger.info("start master failover ..."); - - List needFailoverProcessInstanceList = processDao.queryNeedFailoverProcessInstances(masterHost); - - //updateProcessInstance host is null and insert into command - for(ProcessInstance processInstance : needFailoverProcessInstanceList){ - processDao.processNeedFailoverProcessInstances(processInstance); - } - - logger.info("master failover end"); - } - -} diff --git a/escheduler-server/src/main/java/cn/escheduler/server/zk/ZKWorkerClient.java b/escheduler-server/src/main/java/cn/escheduler/server/zk/ZKWorkerClient.java deleted file mode 100644 index aeea65f2bc..0000000000 --- a/escheduler-server/src/main/java/cn/escheduler/server/zk/ZKWorkerClient.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.zk; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ZKNodeType; -import cn.escheduler.common.zk.AbstractZKClient; -import org.apache.commons.lang.StringUtils; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.cache.PathChildrenCache; -import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; -import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; -import org.apache.curator.utils.ThreadUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Date; -import java.util.concurrent.ThreadFactory; - - -/** - * zookeeper worker client - * single instance - */ -public class ZKWorkerClient extends AbstractZKClient { - - private static final Logger logger = LoggerFactory.getLogger(ZKWorkerClient.class); - - - private static final ThreadFactory defaultThreadFactory = ThreadUtils.newGenericThreadFactory("Worker-Main-Thread"); - - - /** - * worker znode - */ - private String workerZNode = null; - - /** - * create time - */ - private Date createTime = null; - - /** - * zkWorkerClient - */ - private static ZKWorkerClient zkWorkerClient = null; - - private ZKWorkerClient(){ - init(); - } - - /** - * init - */ - private 
void init(){ - - // init system znode - this.initSystemZNode(); - - // monitor worker - this.listenerWorker(); - - // register worker - this.registWorker(); - } - - - /** - * get zkWorkerClient - * - * @return - */ - public static synchronized ZKWorkerClient getZKWorkerClient(){ - if(zkWorkerClient == null){ - zkWorkerClient = new ZKWorkerClient(); - } - return zkWorkerClient; - } - - - /** - * register worker - */ - private void registWorker(){ - try { - String serverPath = registerServer(ZKNodeType.WORKER); - if(StringUtils.isEmpty(serverPath)){ - System.exit(-1); - } - workerZNode = serverPath; - } catch (Exception e) { - logger.error("register worker failure : " + e.getMessage(),e); - System.exit(-1); - } - } - - /** - * monitor worker - */ - private void listenerWorker(){ - PathChildrenCache workerPc = new PathChildrenCache(zkClient, getZNodeParentPath(ZKNodeType.WORKER), true, defaultThreadFactory); - try { - - Date now = new Date(); - createTime = now ; - workerPc.start(); - workerPc.getListenable().addListener(new PathChildrenCacheListener() { - @Override - public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception { - switch (event.getType()) { - case CHILD_ADDED: - logger.info("node added : {}" ,event.getData().getPath()); - break; - case CHILD_REMOVED: - String path = event.getData().getPath(); - //find myself dead - String serverHost = getHostByEventDataPath(path); - if(checkServerSelfDead(serverHost, ZKNodeType.WORKER)){ - return; - } - break; - case CHILD_UPDATED: - break; - default: - break; - } - } - }); - }catch (Exception e){ - logger.error("monitor worker failed : " + e.getMessage(),e); - } - - } - - /** - * get worker znode - * @return - */ - public String getWorkerZNode() { - return workerZNode; - } - - /** - * get worker lock path - * @return - */ - public String getWorkerLockPath(){ - return conf.getString(Constants.ZOOKEEPER_ESCHEDULER_LOCK_WORKERS); - } - - -} diff --git 
a/escheduler-server/src/main/resources/master_logback.xml b/escheduler-server/src/main/resources/master_logback.xml deleted file mode 100644 index d93878218e..0000000000 --- a/escheduler-server/src/main/resources/master_logback.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - ${log.base}/escheduler-master.log - - INFO - - - ${log.base}/escheduler-master.%d{yyyy-MM-dd_HH}.%i.log - 168 - 200MB - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - \ No newline at end of file diff --git a/escheduler-server/src/main/resources/worker_logback.xml b/escheduler-server/src/main/resources/worker_logback.xml deleted file mode 100644 index b3926f1a78..0000000000 --- a/escheduler-server/src/main/resources/worker_logback.xml +++ /dev/null @@ -1,61 +0,0 @@ - - - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - INFO - - - - taskAppId - ${log.base} - - - - ${log.base}/${taskAppId}.log - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - true - - - - - - ${log.base}/escheduler-worker.log - - INFO - - - - ${log.base}/escheduler-worker.%d{yyyy-MM-dd_HH}.%i.log - 168 - 200MB - -       - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - -    - - - - - - - - \ No newline at end of file diff --git a/escheduler-server/src/test/java/cn/escheduler/server/master/AlertManagerTest.java b/escheduler-server/src/test/java/cn/escheduler/server/master/AlertManagerTest.java deleted file mode 100644 index 861995d77f..0000000000 --- a/escheduler-server/src/test/java/cn/escheduler/server/master/AlertManagerTest.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.master; - -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.entity.ProcessInstance; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.dao.mapper.ProcessDefinitionMapper; -import cn.escheduler.dao.mapper.ProcessInstanceMapper; -import cn.escheduler.dao.mapper.TaskInstanceMapper; -import cn.escheduler.server.utils.AlertManager; -import org.junit.Ignore; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; - -import java.util.ArrayList; -import java.util.List; - - -/** - * alert manager test - */ -@Ignore -public class AlertManagerTest { - - private static final Logger logger = LoggerFactory.getLogger(AlertManagerTest.class); - - @Autowired - ProcessDefinitionMapper processDefinitionMapper; - - @Autowired - ProcessInstanceMapper processInstanceMapper; - - @Autowired - TaskInstanceMapper taskInstanceMapper; - - AlertManager alertManager; - - /** - * send worker alert fault tolerance - */ - @Test - public void sendWarnningWorkerleranceFaultTest(){ - // process instance - ProcessInstance processInstance = processInstanceMapper.queryDetailById(13028); - - // set 
process definition - ProcessDefinition processDefinition = processDefinitionMapper.selectById(47); - processInstance.setProcessDefinition(processDefinition); - - - // fault task instance - TaskInstance toleranceTask1 = taskInstanceMapper.queryById(5038); - TaskInstance toleranceTask2 = taskInstanceMapper.queryById(5039); - - List toleranceTaskList = new ArrayList<>(2); - toleranceTaskList.add(toleranceTask1); - toleranceTaskList.add(toleranceTask2); - - alertManager.sendAlertWorkerToleranceFault(processInstance, toleranceTaskList); - } - - - /** - * send worker alert fault tolerance - */ - @Test - public void sendWarnningOfProcessInstanceTest(){ - // process instance - ProcessInstance processInstance = processInstanceMapper.queryDetailById(13028); - - // set process definition - ProcessDefinition processDefinition = processDefinitionMapper.selectById(47); - processInstance.setProcessDefinition(processDefinition); - - - // fault task instance - TaskInstance toleranceTask1 = taskInstanceMapper.queryById(5038); - toleranceTask1.setState(ExecutionStatus.FAILURE); - TaskInstance toleranceTask2 = taskInstanceMapper.queryById(5039); - toleranceTask2.setState(ExecutionStatus.FAILURE); - - List toleranceTaskList = new ArrayList<>(2); - toleranceTaskList.add(toleranceTask1); - toleranceTaskList.add(toleranceTask2); - - alertManager.sendAlertProcessInstance(processInstance, toleranceTaskList); - } - -} diff --git a/escheduler-server/src/test/java/cn/escheduler/server/master/MasterCommandTest.java b/escheduler-server/src/test/java/cn/escheduler/server/master/MasterCommandTest.java deleted file mode 100644 index 20852edc15..0000000000 --- a/escheduler-server/src/test/java/cn/escheduler/server/master/MasterCommandTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.master; - -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.common.enums.FailureStrategy; -import cn.escheduler.common.enums.TaskDependType; -import cn.escheduler.common.enums.WarningType; -import cn.escheduler.common.graph.DAG; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.common.model.TaskNodeRelation; -import cn.escheduler.common.process.ProcessDag; -import cn.escheduler.dao.entity.Command; -import cn.escheduler.dao.entity.ProcessDefinition; -import cn.escheduler.dao.mapper.CommandMapper; -import cn.escheduler.dao.mapper.ProcessDefinitionMapper; -import cn.escheduler.dao.utils.DagHelper; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Map; - -/** - * master test - */ -@Ignore -public class MasterCommandTest { - - private final Logger logger = LoggerFactory.getLogger(MasterCommandTest.class); - - private CommandMapper commandMapper; - - private ProcessDefinitionMapper processDefinitionMapper; - - - @Test - public void StartFromFailedCommand(){ - Command cmd = new Command(); - cmd.setCommandType(CommandType.START_FAILURE_TASK_PROCESS); - cmd.setCommandParam("{\"ProcessInstanceId\":325}"); - cmd.setProcessDefinitionId(63); - - 
commandMapper.insert(cmd); - - } - - @Test - public void RecoverSuspendCommand(){ - - Command cmd = new Command(); - cmd.setProcessDefinitionId(44); - cmd.setCommandParam("{\"ProcessInstanceId\":290}"); - cmd.setCommandType(CommandType.RECOVER_SUSPENDED_PROCESS); - - commandMapper.insert(cmd); - } - - - - - @Test - public void startNewProcessCommand(){ - Command cmd = new Command(); - cmd.setCommandType(CommandType.START_PROCESS); - cmd.setProcessDefinitionId(167); - cmd.setFailureStrategy(FailureStrategy.CONTINUE); - cmd.setWarningType(WarningType.NONE); - cmd.setWarningGroupId(4); - cmd.setExecutorId(19); - - commandMapper.insert(cmd); - } - - @Test - public void ToleranceCommand(){ - Command cmd = new Command(); - cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS); - cmd.setCommandParam("{\"ProcessInstanceId\":816}"); - cmd.setProcessDefinitionId(15); - - commandMapper.insert(cmd); - } - - @Test - public void insertCommand(){ - Command cmd = new Command(); - cmd.setCommandType(CommandType.START_PROCESS); - cmd.setFailureStrategy(FailureStrategy.CONTINUE); - cmd.setWarningType(WarningType.ALL); - cmd.setProcessDefinitionId(72); - cmd.setExecutorId(10); - commandMapper.insert(cmd); - } - - - @Test - public void testDagHelper(){ - - ProcessDefinition processDefinition = processDefinitionMapper.selectById(19); - - try { - ProcessDag processDag = DagHelper.generateFlowDag(processDefinition.getProcessDefinitionJson(), - new ArrayList<>(), new ArrayList<>(), TaskDependType.TASK_POST); - - DAG dag = DagHelper.buildDagGraph(processDag); - Collection start = DagHelper.getStartVertex("1", dag, null); - - System.out.println(start.toString()); - - Map forbidden = DagHelper.getForbiddenTaskNodeMaps(processDefinition.getProcessDefinitionJson()); - System.out.println(forbidden); - } catch (Exception e) { - e.printStackTrace(); - } - - } - - - - -} diff --git a/escheduler-server/src/test/java/cn/escheduler/server/master/ParamsTest.java 
b/escheduler-server/src/test/java/cn/escheduler/server/master/ParamsTest.java deleted file mode 100644 index c3f8ab99e3..0000000000 --- a/escheduler-server/src/test/java/cn/escheduler/server/master/ParamsTest.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.master; - -import cn.escheduler.common.enums.CommandType; -import cn.escheduler.common.enums.DataType; -import cn.escheduler.common.enums.Direct; -import cn.escheduler.common.process.Property; -import cn.escheduler.common.utils.ParameterUtils; -import cn.escheduler.common.utils.placeholder.BusinessTimeUtils; -import cn.escheduler.server.utils.ParamUtils; -import com.alibaba.fastjson.JSON; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Calendar; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - - -/** - * user define param - */ -public class ParamsTest { - - private static final Logger logger = LoggerFactory.getLogger(ParamsTest.class); - - - @Test - public void systemParamsTest()throws Exception{ - String command = "${system.biz.date}"; - - // start process - Map timeParams = BusinessTimeUtils - .getBusinessTime(CommandType.START_PROCESS, - new Date()); - - command = ParameterUtils.convertParameterPlaceholders(command, timeParams); - - logger.info("start process : {}",command); - - - Calendar calendar = Calendar.getInstance(); - calendar.setTime(new Date()); - calendar.add(Calendar.DAY_OF_MONTH, -5); - - - command = "${system.biz.date}"; - // complement data - timeParams = BusinessTimeUtils - .getBusinessTime(CommandType.COMPLEMENT_DATA, - calendar.getTime()); - command = ParameterUtils.convertParameterPlaceholders(command, timeParams); - logger.info("complement data : {}",command); - - } - - @Test - public void convertTest()throws Exception{ - Map globalParams = new HashMap<>(); - Property property = new Property(); - property.setProp("global_param"); - property.setDirect(Direct.IN); - property.setType(DataType.VARCHAR); - property.setValue("${system.biz.date}"); - globalParams.put("global_param",property); - - Map globalParamsMap = new HashMap<>(); - globalParamsMap.put("global_param","${system.biz.date}"); - - - Map localParams = new HashMap<>(); 
- Property localProperty = new Property(); - localProperty.setProp("local_param"); - localProperty.setDirect(Direct.IN); - localProperty.setType(DataType.VARCHAR); - localProperty.setValue("${global_param}"); - localParams.put("local_param", localProperty); - - Map paramsMap = ParamUtils.convert(globalParams, globalParamsMap, - localParams, CommandType.START_PROCESS, new Date()); - logger.info(JSON.toJSONString(paramsMap)); - - - } -} \ No newline at end of file diff --git a/escheduler-server/src/test/java/cn/escheduler/server/worker/EnvFileTest.java b/escheduler-server/src/test/java/cn/escheduler/server/worker/EnvFileTest.java deleted file mode 100644 index 341a74e20c..0000000000 --- a/escheduler-server/src/test/java/cn/escheduler/server/worker/EnvFileTest.java +++ /dev/null @@ -1,64 +0,0 @@ -package cn.escheduler.server.worker; - -import org.apache.commons.lang.StringUtils; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStreamReader; - - -public class EnvFileTest { - - private static final Logger logger = LoggerFactory.getLogger(EnvFileTest.class); - - @Test - public void test() { - String path = System.getProperty("user.dir")+"/script/env/.escheduler_env.sh"; - String pythonHome = getPythonHome(path); - logger.info(pythonHome); - } - - /** - * get python home - * @param path - * @return - */ - private static String getPythonHome(String path){ - BufferedReader br = null; - String line = null; - StringBuilder sb = new StringBuilder(); - try { - br = new BufferedReader(new InputStreamReader(new FileInputStream(path))); - while ((line = br.readLine()) != null){ - if (line.contains("PYTHON_HOME")){ - sb.append(line); - break; - } - } - String result = sb.toString(); - if (StringUtils.isEmpty(result)){ - return null; - } - String[] arrs = result.split("="); - if (arrs.length == 2){ - return arrs[1]; - } - - }catch 
(IOException e){ - logger.error("read file failed : " + e.getMessage(),e); - }finally { - try { - if (br != null){ - br.close(); - } - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - } - return null; - } -} diff --git a/escheduler-server/src/test/java/cn/escheduler/server/worker/shell/ShellCommandExecutorTest.java b/escheduler-server/src/test/java/cn/escheduler/server/worker/shell/ShellCommandExecutorTest.java deleted file mode 100644 index 4142850862..0000000000 --- a/escheduler-server/src/test/java/cn/escheduler/server/worker/shell/ShellCommandExecutorTest.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.shell; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.server.utils.LoggerUtils; -import cn.escheduler.server.worker.task.AbstractTask; -import cn.escheduler.server.worker.task.TaskManager; -import cn.escheduler.server.worker.task.TaskProps; -import com.alibaba.fastjson.JSONObject; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Date; - -/** - * python shell command executor test - */ -@Ignore -public class ShellCommandExecutorTest { - - private static final Logger logger = LoggerFactory.getLogger(ShellCommandExecutorTest.class); - - private ProcessDao processDao = null; - - @Before - public void before(){ - processDao = DaoFactory.getDaoInstance(ProcessDao.class); - } - - @Test - public void test() throws Exception { - - TaskProps taskProps = new TaskProps(); - // processDefineId_processInstanceId_taskInstanceId - taskProps.setTaskDir("/opt/soft/program/tmp/escheduler/exec/flow/5/36/2864/7657"); - taskProps.setTaskAppId("36_2864_7657"); - // set tenant -> task execute linux user - taskProps.setTenantCode("hdfs"); - taskProps.setTaskStartTime(new Date()); - taskProps.setTaskTimeout(360000); - taskProps.setTaskInstId(7657); - - - - TaskInstance taskInstance = processDao.findTaskInstanceById(7657); - - String taskJson = taskInstance.getTaskJson(); - TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); - taskProps.setTaskParams(taskNode.getParams()); - - - // custom logger - Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, - taskInstance.getProcessDefinitionId(), - taskInstance.getProcessInstanceId(), - 
taskInstance.getId())); - - - AbstractTask task = TaskManager.newTask(taskInstance.getTaskType(), taskProps, taskLogger); - - logger.info("task info : {}", task); - - // job init - task.init(); - - // job handle - task.handle(); - ExecutionStatus status = ExecutionStatus.SUCCESS; - - if (task.getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){ - status = ExecutionStatus.SUCCESS; - }else if (task.getExitStatusCode() == Constants.EXIT_CODE_KILL){ - status = ExecutionStatus.KILL; - }else { - status = ExecutionStatus.FAILURE; - } - - logger.info(status.toString()); - } -} \ No newline at end of file diff --git a/escheduler-server/src/test/java/cn/escheduler/server/worker/sql/SqlExecutorTest.java b/escheduler-server/src/test/java/cn/escheduler/server/worker/sql/SqlExecutorTest.java deleted file mode 100644 index b5dda9d16e..0000000000 --- a/escheduler-server/src/test/java/cn/escheduler/server/worker/sql/SqlExecutorTest.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.sql; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.enums.ExecutionStatus; -import cn.escheduler.common.model.TaskNode; -import cn.escheduler.dao.DaoFactory; -import cn.escheduler.dao.ProcessDao; -import cn.escheduler.dao.entity.TaskInstance; -import cn.escheduler.server.utils.LoggerUtils; -import cn.escheduler.server.worker.task.AbstractTask; -import cn.escheduler.server.worker.task.TaskManager; -import cn.escheduler.server.worker.task.TaskProps; -import com.alibaba.fastjson.JSONObject; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Date; - -/** - * python shell command executor test - */ -@Ignore -public class SqlExecutorTest { - - private static final Logger logger = LoggerFactory.getLogger(SqlExecutorTest.class); - - private ProcessDao processDao = null; - - @Before - public void before(){ - processDao = DaoFactory.getDaoInstance(ProcessDao.class); - } - - @Test - public void test() throws Exception { - String nodeName = "mysql sql test"; - String taskAppId = "51_11282_263978"; - String tenantCode = "hdfs"; - int taskInstId = 263978; - sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId); - } - - @Test - public void testClickhouse() throws Exception { - String nodeName = "ClickHouse sql test"; - String taskAppId = "1_11_20"; - String tenantCode = "default"; - int taskInstId = 20; - sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId); - } - - @Test - public void testOracle() throws Exception { - String nodeName = "oracle sql test"; - String taskAppId = "2_13_25"; - String tenantCode = "demo"; - int taskInstId = 25; - sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId); - } - - @Test - public void testSQLServer() throws Exception { - String nodeName = "SQL Server sql test"; - String taskAppId = "3_14_27"; - String tenantCode = "demo"; - int taskInstId = 27; - 
sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId); - } - - /** - * Basic test template for SQLTasks, mainly test different types of DBMS types - * @param nodeName node name for selected task - * @param taskAppId task app id - * @param tenantCode tenant code - * @param taskInstId task instance id - * @throws Exception - */ - private void sharedTestSqlTask(String nodeName, String taskAppId, String tenantCode, int taskInstId) throws Exception { - TaskProps taskProps = new TaskProps(); - taskProps.setTaskDir(""); - // processDefineId_processInstanceId_taskInstanceId - taskProps.setTaskAppId(taskAppId); - // set tenant -> task execute linux user - taskProps.setTenantCode(tenantCode); - taskProps.setTaskStartTime(new Date()); - taskProps.setTaskTimeout(360000); - taskProps.setTaskInstId(taskInstId); - taskProps.setNodeName(nodeName); - - - - TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); - - String taskJson = taskInstance.getTaskJson(); - TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); - taskProps.setTaskParams(taskNode.getParams()); - - - // custom logger - Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, - taskInstance.getProcessDefinitionId(), - taskInstance.getProcessInstanceId(), - taskInstance.getId())); - - - AbstractTask task = TaskManager.newTask(taskInstance.getTaskType(), taskProps, taskLogger); - - logger.info("task info : {}", task); - - // job init - task.init(); - - // job handle - task.handle(); - ExecutionStatus status = ExecutionStatus.SUCCESS; - - if (task.getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){ - status = ExecutionStatus.SUCCESS; - }else if (task.getExitStatusCode() == Constants.EXIT_CODE_KILL){ - status = ExecutionStatus.KILL; - }else { - status = ExecutionStatus.FAILURE; - } - - logger.info(status.toString()); - } -} \ No newline at end of file diff --git 
a/escheduler-server/src/test/java/cn/escheduler/server/worker/task/dependent/DependentTaskTest.java b/escheduler-server/src/test/java/cn/escheduler/server/worker/task/dependent/DependentTaskTest.java deleted file mode 100644 index 1dcb5a6c7e..0000000000 --- a/escheduler-server/src/test/java/cn/escheduler/server/worker/task/dependent/DependentTaskTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package cn.escheduler.server.worker.task.dependent; - -import cn.escheduler.common.Constants; -import cn.escheduler.server.worker.task.TaskProps; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DependentTaskTest { - - private static final Logger logger = LoggerFactory.getLogger(DependentTaskTest.class); - - - @Test - public void testDependInit(){ - - TaskProps taskProps = new TaskProps(); - - String dependString = "{\n" + - "\"dependTaskList\":[\n" + - " {\n" + - " \"dependItemList\":[\n" + - " {\n" + - " \"definitionId\": 101,\n" + - " \"depTasks\": \"ALL\",\n" + - " \"cycle\": \"day\",\n" + - " \"dateValue\": \"last1Day\"\n" + - " }\n" + - " ],\n" + - " \"relation\": \"AND\"\n" + - " }\n" + - " ],\n" + - "\"relation\":\"OR\"\n" + - "}"; - - taskProps.setTaskInstId(252612); - taskProps.setDependence(dependString); - DependentTask dependentTask = new DependentTask(taskProps, logger); - dependentTask.init(); - dependentTask.handle(); - Assert.assertEquals(dependentTask.getExitStatusCode(), Constants.EXIT_CODE_FAILURE ); - } - - - -} \ No newline at end of file diff --git a/escheduler-server/src/test/java/cn/escheduler/server/zk/StandaloneZKServerForTest.java b/escheduler-server/src/test/java/cn/escheduler/server/zk/StandaloneZKServerForTest.java deleted file mode 100644 index 078dbcff01..0000000000 --- a/escheduler-server/src/test/java/cn/escheduler/server/zk/StandaloneZKServerForTest.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package cn.escheduler.server.zk; - -import cn.escheduler.common.thread.ThreadPoolExecutors; -import org.apache.zookeeper.server.ServerConfig; -import org.apache.zookeeper.server.ZooKeeperServerMain; -import org.apache.zookeeper.server.quorum.QuorumPeerConfig; -import org.junit.Before; -import org.junit.Ignore; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.util.Properties; - - -/** - * just for test - */ -@Ignore -public class StandaloneZKServerForTest { - - private static final Logger logger = LoggerFactory.getLogger(StandaloneZKServerForTest.class); - - private static volatile ZooKeeperServerMain zkServer = null; - - - @Before - public void before() { - logger.info("standalone zookeeper server for test service start "); - - ThreadPoolExecutors.getInstance().execute(new Runnable() { - @Override - public void run() { - - //delete zk data dir ? 
- File zkFile = new File(System.getProperty("java.io.tmpdir"), "zookeeper"); -// if(zkFile.exists()){ -// zkFile.delete(); -// } - startStandaloneServer("2000", zkFile.getAbsolutePath(), "2181", "10", "5"); - } - }); - - } - - - /** - * start zk server - * @param tickTime zookeeper ticktime - * @param dataDir zookeeper data dir - * @param clientPort zookeeper client port - * @param initLimit zookeeper init limit - * @param syncLimit zookeeper sync limit - */ - private void startStandaloneServer(String tickTime, String dataDir, String clientPort, String initLimit, String syncLimit) { - Properties props = new Properties(); - props.setProperty("tickTime", tickTime); - props.setProperty("dataDir", dataDir); - props.setProperty("clientPort", clientPort); - props.setProperty("initLimit", initLimit); - props.setProperty("syncLimit", syncLimit); - - QuorumPeerConfig quorumConfig = new QuorumPeerConfig(); - try { - quorumConfig.parseProperties(props); - - if(zkServer == null ){ - - synchronized (StandaloneZKServerForTest.class){ - if(zkServer == null ){ - zkServer = new ZooKeeperServerMain(); - final ServerConfig config = new ServerConfig(); - config.readFrom(quorumConfig); - zkServer.runFromConfig(config); - } - } - - } - - } catch (Exception e) { - logger.error("start standalone server fail!", e); - } - } - - -} \ No newline at end of file diff --git a/escheduler-server/src/test/java/cn/escheduler/server/zk/ZKWorkerClientTest.java b/escheduler-server/src/test/java/cn/escheduler/server/zk/ZKWorkerClientTest.java deleted file mode 100644 index 005b338cb7..0000000000 --- a/escheduler-server/src/test/java/cn/escheduler/server/zk/ZKWorkerClientTest.java +++ /dev/null @@ -1,37 +0,0 @@ -package cn.escheduler.server.zk; - -import cn.escheduler.common.Constants; -import cn.escheduler.common.zk.AbstractZKClient; -import org.junit.Assert; -import org.junit.Test; - -import java.util.Arrays; -import java.util.List; - -import static org.junit.Assert.*; - -/** - * - */ -public class 
ZKWorkerClientTest { - - @Test - public void getZKWorkerClient() throws Exception { - - -// ZKWorkerClient zkWorkerClient = ZKWorkerClient.getZKWorkerClient(); -// zkWorkerClient.removeDeadServerByHost("127.0.0.1", Constants.WORKER_PREFIX); - - - } - - @Test - public void test(){ - String ips = ""; - - List ipList = Arrays.asList(ips.split(",")); - - - Assert.assertEquals(1, ipList.size()); - } -} \ No newline at end of file diff --git a/escheduler-ui/install-escheduler-ui.sh b/escheduler-ui/install-escheduler-ui.sh deleted file mode 100755 index 3c9578746a..0000000000 --- a/escheduler-ui/install-escheduler-ui.sh +++ /dev/null @@ -1,219 +0,0 @@ -#!/bin/bash -# current path -esc_basepath=$(cd `dirname $0`; pwd) - -menu(){ - cat <> /etc/nginx/conf.d/dolphinscheduler.conf - -} - -ubuntu(){ - # update source - apt-get update - - # install nginx - apt-get install -y nginx - - # config nginx - dolphinschedulerConf $1 $2 - - # startup nginx - /etc/init.d/nginx start - sleep 1 - if [ $? -ne 0 ];then - /etc/init.d/nginx start - fi - nginx -s reload -} - -centos7(){ - - rpm -Uvh http://nginx.org/packages/centos/7/noarch/RPMS/nginx-release-centos-7-0.el7.ngx.noarch.rpm - yum install -y nginx - - # config nginx - dolphinschedulerConf $1 $2 - - # solve 0.0.0.0:8888 problem - yum -y install policycoreutils-python - semanage port -a -t http_port_t -p tcp $esc_proxy - - # open front access port - firewall-cmd --zone=public --add-port=$esc_proxy/tcp --permanent - - # startup nginx - systemctl start nginx - sleep 1 - if [ $? 
-ne 0 ];then - systemctl start nginx - fi - nginx -s reload - - # set SELinux parameters - sed -i "s/SELINUX=enforcing/SELINUX=disabled/g" /etc/selinux/config - # temporary effect - setenforce 0 - -} - - -centos6(){ - - rpm -ivh http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm - - # install nginx - yum install nginx -y - - # config nginx - dolphinschedulerConf $1 $2 - - # startup nginx - /etc/init.d/nginx start - sleep 1 - if [ $? -ne 0 ];then - /etc/init.d/nginx start - fi - nginx -s reload - - # set SELinux parameters - sed -i "s/SELINUX=enforcing/SELINUX=disabled/g" /etc/selinux/config - - # temporary effect - setenforce 0 - -} - -function main(){ - echo "Welcome to thedolphinscheduler front-end deployment script, which is currently only supported by front-end deployment scripts : CentOS and Ubuntu" - echo "Please execute in the dolphinscheduler-ui directory" - - #To be compatible with MacOS and Linux - if [[ "$OSTYPE" == "darwin"* ]]; then - # Mac OSX - echo "Easy Scheduler ui install not support Mac OSX operating system" - exit 1 - elif [[ "$OSTYPE" == "linux-gnu" ]]; then - # linux - echo "linux" - elif [[ "$OSTYPE" == "cygwin" ]]; then - # POSIX compatibility layer and Linux environment emulation for Windows - echo "Easy Scheduler ui not support Windows operating system" - exit 1 - elif [[ "$OSTYPE" == "msys" ]]; then - # Lightweight shell and GNU utilities compiled for Windows (part of MinGW) - echo "Easy Scheduler ui not support Windows operating system" - exit 1 - elif [[ "$OSTYPE" == "win32" ]]; then - echo "Easy Scheduler ui not support Windows operating system" - exit 1 - elif [[ "$OSTYPE" == "freebsd"* ]]; then - # ... - echo "freebsd" - else - # Unknown. 
- echo "Operating system unknown, please tell us(submit issue) for better service" - exit 1 - fi - - - # config front-end access ports - read -p "Please enter the nginx proxy port, do not enter, the default is 8888 :" esc_proxy_port - if [ -z "${esc_proxy_port}" ];then - esc_proxy_port="8888" - fi - - read -p "Please enter the api server proxy ip, you must enter, for example: 192.168.xx.xx :" esc_api_server_ip - if [ -z "${esc_api_server_ip}" ];then - echo "api server proxy ip can not be empty." - exit 1 - fi - - read -p "Please enter the api server proxy port, do not enter, the default is 12345:" esc_api_server_port - if [ -z "${esc_api_server_port}" ];then - esc_api_server_port="12345" - fi - - # api server backend address - esc_api_server="http://$esc_api_server_ip:$esc_api_server_port" - - # local ip address - esc_ipaddr=$(ip a | grep inet | grep -v inet6 | grep -v 127 | sed 's/^[ \t]*//g' | cut -d ' ' -f2 | head -n 1 | awk -F '/' '{print $1}') - - # Prompt message - menu - - read -p "Please enter the installation number(1|2|3|4):" num - - case $num in - 1) - centos6 ${esc_proxy_port} ${esc_api_server} - ;; - 2) - centos7 ${esc_proxy_port} ${esc_api_server} - ;; - 3) - ubuntu ${esc_proxy_port} ${esc_api_server} - ;; - 4) - echo $"Usage :sh $0" - exit 1 - ;; - *) - echo $"Usage :sh $0" - exit 1 - esac - echo "Please visit the browser:http://${esc_ipaddr}:${esc_proxy_port}" - -} - -main diff --git a/package.xml b/package.xml index 153dceec9b..a4ed5abe07 100644 --- a/package.xml +++ b/package.xml @@ -11,7 +11,7 @@ - escheduler-server/target/escheduler-server-${project.version} + dolphinscheduler-server/target/dolphinscheduler-server-${project.version} **/*.* @@ -19,7 +19,7 @@ - escheduler-api/target/escheduler-api-${project.version} + dolphinscheduler-api/target/dolphinscheduler-api-${project.version} **/*.* @@ -27,7 +27,7 @@ - escheduler-alert/target/escheduler-alert-${project.version} + dolphinscheduler-alert/target/dolphinscheduler-alert-${project.version} 
**/*.* @@ -35,7 +35,7 @@ - escheduler-ui/dist + dolphinscheduler-ui/dist **/*.* @@ -72,7 +72,7 @@ start-all.sh stop-all.sh - escheduler-daemon.sh + dolphinscheduler-daemon.sh ./bin diff --git a/pom.xml b/pom.xml index d21714a6cc..0fd2ba1bc8 100644 --- a/pom.xml +++ b/pom.xml @@ -1,11 +1,11 @@ 4.0.0 - cn.analysys - escheduler + org.apache.dolphinscheduler + dolphinscheduler 1.1.0-SNAPSHOT pom - escheduler + dolphinscheduler http://maven.apache.org @@ -143,33 +143,33 @@ - cn.analysys - escheduler-server + org.apache.dolphinscheduler + dolphinscheduler-server ${project.version} - cn.analysys - escheduler-common + org.apache.dolphinscheduler + dolphinscheduler-common ${project.version} - cn.analysys - escheduler-dao + org.apache.dolphinscheduler + dolphinscheduler-dao ${project.version} - cn.analysys - escheduler-api + org.apache.dolphinscheduler + dolphinscheduler-api ${project.version} - cn.analysys - escheduler-rpc + org.apache.dolphinscheduler + dolphinscheduler-rpc ${project.version} - cn.analysys - escheduler-alert + org.apache.dolphinscheduler + dolphinscheduler-alert ${project.version} @@ -511,12 +511,12 @@ - escheduler-server - escheduler-common - escheduler-api - escheduler-dao - escheduler-alert - escheduler-rpc + dolphinscheduler-server + dolphinscheduler-common + dolphinscheduler-api + dolphinscheduler-dao + dolphinscheduler-alert + dolphinscheduler-rpc diff --git a/script/create-escheduler.sh b/script/create-dolphinscheduler.sh similarity index 100% rename from script/create-escheduler.sh rename to script/create-dolphinscheduler.sh diff --git a/script/escheduler-daemon.sh b/script/dolphinscheduler-daemon.sh similarity index 100% rename from script/escheduler-daemon.sh rename to script/dolphinscheduler-daemon.sh diff --git a/script/upgrade-escheduler.sh b/script/upgrade-dolphinscheduler.sh similarity index 100% rename from script/upgrade-escheduler.sh rename to script/upgrade-dolphinscheduler.sh